Example usage for java.nio.file Files readAllLines

List of usage examples for java.nio.file Files readAllLines

Introduction

On this page you can find example usages of java.nio.file.Files.readAllLines drawn from open-source projects.

Prototype

public static List<String> readAllLines(Path path, Charset cs) throws IOException 

Document

Read all lines from a file.
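
Before the project excerpts below, here is a minimal, self-contained sketch of the call; the file name "example.txt" is only an illustration. readAllLines reads the whole file into memory at once, so it is best suited to reasonably small files.

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;

public class ReadAllLinesSketch {
    public static void main(String[] args) throws IOException {
        // Hypothetical input file; adjust the path for your environment.
        Path path = Paths.get("example.txt");
        // Read the entire file into a list of lines, decoded as UTF-8.
        List<String> lines = Files.readAllLines(path, StandardCharsets.UTF_8);
        for (String line : lines) {
            if (!line.trim().isEmpty()) {
                System.out.println(line);
            }
        }
    }
}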

Usage

From source file:ai.grakn.graql.GraqlShell.java

private static String loadQuery(String filePath) throws IOException {
    List<String> lines = Files.readAllLines(Paths.get(filePath), StandardCharsets.UTF_8);
    // joining is statically imported from java.util.stream.Collectors
    return lines.stream().collect(joining("\n"));
}

From source file:org.jboss.as.test.integration.logging.formatters.JsonFormatterTestCase.java

@Test
public void testDateFormat() throws Exception {
    configure(Collections.emptyMap(), Collections.emptyMap(), false);

    final String dateFormat = "yyyy-MM-dd'T'HH:mm:ssSSSZ";
    final String timezone = "GMT";

    // Change the date format and time zone
    final CompositeOperationBuilder builder = CompositeOperationBuilder.create();
    builder.addStep(Operations.createWriteAttributeOperation(FORMATTER_ADDRESS, "date-format", dateFormat));
    builder.addStep(Operations.createWriteAttributeOperation(FORMATTER_ADDRESS, "zone-id", timezone));
    executeOperation(builder.build());

    final String msg = "Logging test: JsonFormatterTestCase.testNoExceptions";
    int statusCode = getResponse(msg,
            Collections.singletonMap(LoggingServiceActivator.LOG_EXCEPTION_KEY, "false"));
    Assert.assertTrue("Invalid response statusCode: " + statusCode, statusCode == HttpStatus.SC_OK);

    final List<String> expectedKeys = createDefaultKeys();

    for (String s : Files.readAllLines(logFile, StandardCharsets.UTF_8)) {
        if (s.trim().isEmpty())
            continue;
        try (JsonReader reader = Json.createReader(new StringReader(s))) {
            final JsonObject json = reader.readObject();

            validateDefault(json, expectedKeys, msg);
            validateStackTrace(json, false, false);

            // Validate the date format is correct. We don't want to validate the specific date, only that it's
            // parsable.
            final String jsonDate = json.getString("timestamp");
            // If the date is not parsable an exception should be thrown
            try {
                DateTimeFormatter.ofPattern(dateFormat, Locale.ROOT).withZone(ZoneId.of(timezone))
                        .parse(jsonDate);
            } catch (Exception e) {
                Assert.fail(String.format("Failed to parse %s with pattern %s and zone %s: %s", jsonDate,
                        dateFormat, timezone, e.getMessage()));
            }
        }
    }
}

From source file:de.fosd.jdime.strategy.LinebasedStrategy.java

@Override
public void dumpFile(FileArtifact artifact, boolean graphical) throws IOException { //TODO: optionally save to outputfile
    List<String> lines = Files.readAllLines(artifact.getFile().toPath(), StandardCharsets.UTF_8);

    for (String line : lines) {
        System.out.println(line);
    }
}

From source file:squash.deployment.lambdas.AngularjsAppCustomResourceLambda.java

/**
 * Implementation for the AWS Lambda function backing the AngularjsApp
 * resource.
 * 
 * <p>
 * This lambda requires the following environment variables:
 * <ul>
 * <li>WebsiteBucket - name of S3 bucket serving the booking website.</li>
 * <li>AngularjsZipBucket - S3 bucket holding the Angularjs app zip file.</li>
 * <li>CognitoIdentityPoolId - id of the Cognito Identity Pool.</li>
 * <li>CognitoUserPoolId - id of the Cognito User Pool.</li>
 * <li>CognitoUserPoolIdentityProviderName - Name of user pool identity provider.</li>
 * <li>JavascriptClientAppId - id of the Cognito User Pool app to use from javascript.</li>
 * <li>ApiGatewayBaseUrl - base Url of the ApiGateway Api.</li>
 * <li>Region - the AWS region in which the Cloudformation stack is created.</li>
 * <li>Revision - integer incremented to force stack updates to update this resource.</li>
 * </ul>
 *
 * <p>On success, it returns the following output to Cloudformation:
 * <ul>
 *    <li>WebsiteURL - Url of the Angularjs website.</li>
 * </ul>
 *
 * <p>Updates will delete the previous deployment and replace it with the new one.
 *
 * @param request
 *            request parameters as provided by the CloudFormation service
 * @param context
 *            context as provided by the CloudFormation service
 */
@Override
public Object handleRequest(Map<String, Object> request, Context context) {

    LambdaLogger logger = context.getLogger();
    logger.log("Starting AngularjsApp custom resource handleRequest");

    // Handle standard request parameters
    Map<String, String> standardRequestParameters = LambdaInputLogger.logStandardRequestParameters(request,
            logger);
    String requestType = standardRequestParameters.get("RequestType");

    // Handle required environment variables
    logger.log("Logging required environment variables for custom resource request");
    String websiteBucket = System.getenv("WebsiteBucket");
    String angularjsZipBucket = System.getenv("AngularjsZipBucket");
    String cognitoIdentityPoolId = System.getenv("CognitoIdentityPoolId");
    String cognitoUserPoolId = System.getenv("CognitoUserPoolId");
    String cognitoUserPoolIdentityProviderName = System.getenv("CognitoUserPoolIdentityProviderName");
    String javascriptClientAppId = System.getenv("JavascriptClientAppId");
    String apiGatewayBaseUrl = System.getenv("ApiGatewayBaseUrl");
    String region = System.getenv("AWS_REGION");
    String revision = System.getenv("Revision");

    // Log out our required environment variables
    logger.log("WebsiteBucket: " + websiteBucket);
    logger.log("AngularjsZipBucket: " + angularjsZipBucket);
    logger.log("CognitoIdentityPoolId: " + cognitoIdentityPoolId);
    logger.log("CognitoUserPoolId: " + cognitoUserPoolId);
    logger.log("CognitoUserPoolIdentityProviderName: " + cognitoUserPoolIdentityProviderName);
    logger.log("JavascriptClientAppId: " + javascriptClientAppId);
    logger.log("ApiGatewayBaseUrl: " + apiGatewayBaseUrl);
    logger.log("Region: " + region);
    logger.log("Revision: " + revision);

    // API calls below can sometimes give access denied errors during stack
    // creation, which is likely because required new roles have not yet propagated
    // across AWS. We sleep here to allow time for this propagation.
    try {
        Thread.sleep(10000);
    } catch (InterruptedException e) {
        logger.log("Sleep to allow new roles to propagate has been interrupted.");
    }

    // Prepare our response to be sent in the finally block
    CloudFormationResponder cloudFormationResponder = new CloudFormationResponder(standardRequestParameters,
            "DummyPhysicalResourceId");
    // Initialise failure response, which will be changed on success
    String responseStatus = "FAILED";

    String websiteURL = null;
    try {
        cloudFormationResponder.initialise();

        if (requestType.equals("Create") || requestType.equals("Update")) {

            // On updates we clear out the app first
            if (requestType.equals("Update")) {
                deleteAngularjsApp(websiteBucket, logger);
            }

            // Get the Angularjs app's zip file
            try {
                logger.log("Downloading Angularjs zip from S3");
                IS3TransferManager transferManager = getS3TransferManager();
                String zipDownloadPath = "/tmp/AngularjsApp.zip";
                File downloadedFile = new File(zipDownloadPath);
                TransferUtils.waitForS3Transfer(
                        transferManager.download(angularjsZipBucket, "AngularjsApp.zip", downloadedFile),
                        logger);
                logger.log("Downloaded Angularjs zip successfully from S3");

                // Modify the Bookings and Identity Service files to point to the
                // correct Cognito data, ApiGateway base url, and region.
                logger.log("Extracting Angularjs zip");
                String extractPath = "/tmp";
                try {
                    ZipFile zipFile = new ZipFile(zipDownloadPath);
                    // Will produce /tmp/app/app.js etc
                    zipFile.extractAll(extractPath);
                } catch (ZipException e) {
                    logger.log("Caught a ZipException Exception: " + e.getMessage());
                    throw e;
                }
                logger.log("Extracted Angularjs zip");

                logger.log(
                        "Modifying the Bookings and Identity Services to point to the correct ApiGatewayBaseUrl, Cognito data, and region");
                String fileContent;
                String filePath = extractPath + "/app/sqawsh.min.js";
                try (FileInputStream inputStream = new FileInputStream(filePath)) {
                    fileContent = IOUtils.toString(inputStream);
                }
                fileContent = fileContent.replace("bookingregiontobereplaced", region)
                        .replace("bookingurltobereplaced", apiGatewayBaseUrl)
                        .replace("bookingbuckettobereplaced", websiteBucket)
                        .replace("identityregiontobereplaced", region)
                        .replace("identitypoolidtobereplaced", cognitoIdentityPoolId)
                        .replace("identityuserpoolidtobereplaced", cognitoUserPoolId)
                        .replace("identityprovidernametobereplaced", cognitoUserPoolIdentityProviderName)
                        .replace("identityappidtobereplaced", javascriptClientAppId);

                FileUtils.writeStringToFile(new File(filePath), fileContent);
                logger.log(
                        "Modified the Bookings and Identity Services to point to the correct ApiGatewayBaseUrl, Cognito data, and region");

                // We will later modify the gzip-ed filenames to add a revving suffix.
                // But before we gzip, we need to modify the revved file links in
                // index.html
                String revvingSuffix = System.getenv("RevvingSuffix");
                File appPath = new File("/tmp/app");
                logger.log("Modifying links to revved files in index.html");
                Path indexPath = new File(appPath, "index.html").toPath();
                Charset charset = StandardCharsets.UTF_8;
                List<String> newLines = new ArrayList<>();
                for (String line : Files.readAllLines(indexPath, charset)) {
                    if (line.contains("googleapis") || line.contains("cloudflare") || line.contains("maxcdn")) {
                        // Don't alter lines linking to CDNs; they are already revved.
                        newLines.add(line);
                    } else {
                        newLines.add(line.replace(".js", "_" + revvingSuffix + ".js").replace(".css",
                                "_" + revvingSuffix + ".css"));
                    }
                }
                Files.write(indexPath, newLines, charset);
                logger.log("Modified links to revved files in index.html");

                // GZIP all js, css, and html files within app folder
                logger.log("GZip-ing files in app folder to enable serving gzip-ed from S3");
                squash.deployment.lambdas.utils.FileUtils.gzip(Arrays.asList(appPath), Collections.emptyList(),
                        logger);
                logger.log("GZip-ed files in app folder to enable serving gzip-ed from S3");

                // Rev the js and css files by appending revving-suffix to names - for
                // cache-ing
                logger.log("Appending revving suffix to js and css files in app folder");
                squash.deployment.lambdas.utils.FileUtils.appendRevvingSuffix(revvingSuffix, appPath.toPath(),
                        logger);
                logger.log("Appended revving suffix to js and css files in app folder");

                // Upload the modified app to the S3 website bucket
                logger.log("Uploading modified Angularjs app to S3 website bucket");
                // Will produce <S3BucketRoot>/app/sqawsh.min.js etc
                TransferUtils.waitForS3Transfer(transferManager.uploadDirectory(websiteBucket, "app",
                        new File(extractPath + "/app"), true), logger);
                logger.log("Uploaded modified Angularjs app to S3 website bucket");

                // Add gzip content-encoding metadata to zip-ed files
                logger.log("Updating metadata on modified Angularjs app in S3 bucket");
                TransferUtils.addGzipContentEncodingMetadata(websiteBucket, Optional.of("app"), logger);
                logger.log("Updated metadata on modified Angularjs app in S3 bucket");

                // Upload Cognito SDKs and their dependencies - these should all be
                // zipped first. N.B. We also append filenames with the revving
                // suffix.
                logger.log("About to upload Cognito libraries");
                List<ImmutableTriple<String, String, byte[]>> cognitoLibraries = new ArrayList<>();
                cognitoLibraries.add(new ImmutableTriple<>("Cognito SDK",
                        "aws-cognito-sdk.min_" + revvingSuffix + ".js", IOUtils.toByteArray(new URL(
                                "https://raw.githubusercontent.com/aws/amazon-cognito-identity-js/master/dist/aws-cognito-sdk.min.js"))));
                cognitoLibraries.add(new ImmutableTriple<>("Cognito Identity SDK",
                        "amazon-cognito-identity.min_" + revvingSuffix + ".js", IOUtils.toByteArray(new URL(
                                "https://raw.githubusercontent.com/aws/amazon-cognito-identity-js/master/dist/amazon-cognito-identity.min.js"))));
                cognitoLibraries.add(new ImmutableTriple<>("Big Integer Library",
                        "jsbn_" + revvingSuffix + ".js",
                        IOUtils.toByteArray(new URL("http://www-cs-students.stanford.edu/~tjw/jsbn/jsbn.js"))));
                cognitoLibraries.add(new ImmutableTriple<>("Big Integer Library 2",
                        "jsbn2_" + revvingSuffix + ".js", IOUtils.toByteArray(
                                new URL("http://www-cs-students.stanford.edu/~tjw/jsbn/jsbn2.js"))));

                // The SJCL still seems to need configuring to include the bytes
                // codec, despite 1.0 of Cognito Idp saying it had removed this
                // dependency. So for now we get this bytes-codec-configured version
                // from our resources.
                String sjcl_library;
                try {
                    sjcl_library = IOUtils.toString(AngularjsAppCustomResourceLambda.class
                            .getResourceAsStream("/squash/deployment/lambdas/sjcl.js"));
                } catch (IOException e) {
                    logger.log("Exception caught reading sjcl.js file: " + e.getMessage());
                    throw new Exception("Exception caught reading sjcl.js file");
                }
                logger.log("Read modified SJCL library from resources");
                cognitoLibraries.add(new ImmutableTriple<>("Stanford Javascript Crypto Library",
                        "sjcl_" + revvingSuffix + ".js", sjcl_library.getBytes(Charset.forName("UTF-8"))));

                for (ImmutableTriple<String, String, byte[]> cognitoLibrary : cognitoLibraries) {
                    logger.log("Uploading a Cognito library to S3 website bucket. Library name: "
                            + cognitoLibrary.left);

                    byte[] zippedLibrary = squash.deployment.lambdas.utils.FileUtils.gzip(cognitoLibrary.right,
                            logger);
                    ByteArrayInputStream libraryAsGzippedStream = new ByteArrayInputStream(zippedLibrary);
                    ObjectMetadata metadata = new ObjectMetadata();
                    metadata.setContentLength(zippedLibrary.length);
                    metadata.setContentEncoding("gzip");
                    String keyName = "app/components/identity/cognito/" + cognitoLibrary.middle;
                    logger.log("Uploading to key: " + keyName);
                    PutObjectRequest putObjectRequest = new PutObjectRequest(websiteBucket, keyName,
                            libraryAsGzippedStream, metadata);
                    TransferUtils.waitForS3Transfer(transferManager.upload(putObjectRequest), logger);
                    logger.log("Uploaded a Cognito library to S3 website bucket: " + cognitoLibrary.left);
                }

                // Add cache-control metadata to files. Css and js files will have
                // 1-year cache validity, since they are rev-ved.
                logger.log("Updating cache-control metadata on angular app in S3 bucket");
                TransferUtils.addCacheControlHeader("max-age=31536000", websiteBucket, Optional.of("app"),
                        ".js", logger);
                TransferUtils.addCacheControlHeader("max-age=31536000", websiteBucket, Optional.of("app"),
                        ".css", logger);
                // All html must revalidate every time
                TransferUtils.addCacheControlHeader("no-cache, must-revalidate", websiteBucket,
                        Optional.of("app"), ".html", logger);
                logger.log("Updated cache-control metadata on angular app in S3 bucket");

                // App content must be public so it can be served from the website
                logger.log("Modifying Angularjs app ACL in S3 website bucket");
                TransferUtils.setPublicReadPermissionsOnBucket(websiteBucket, Optional.of("app/"), logger);
                logger.log("Modified Angularjs app ACL in S3 website bucket");

            } catch (MalformedInputException mie) {
                logger.log("Caught a MalformedInputException: " + mie.getMessage());
                throw mie;
            } catch (IOException ioe) {
                logger.log("Caught an IO Exception: " + ioe.getMessage());
                throw ioe;
            }

            websiteURL = "http://" + websiteBucket + ".s3-website-" + region + ".amazonaws.com/app/index.html";
        } else if (requestType.equals("Delete")) {
            logger.log("Delete request - so deleting the app");
            deleteAngularjsApp(websiteBucket, logger);
        }

        responseStatus = "SUCCESS";
        return null;
    } catch (AmazonServiceException ase) {
        ExceptionUtils.logAmazonServiceException(ase, logger);
        return null;
    } catch (AmazonClientException ace) {
        ExceptionUtils.logAmazonClientException(ace, logger);
        return null;
    } catch (Exception e) {
        logger.log("Exception caught in AngularjsApp Lambda: " + e.getMessage());
        return null;
    } finally {
        // Send response to CloudFormation
        cloudFormationResponder.addKeyValueOutputsPair("WebsiteURL", websiteURL);
        cloudFormationResponder.sendResponse(responseStatus, logger);
    }
}

From source file:com.att.aro.datacollector.ioscollector.utilities.AppSigningHelper.java

private void verifyEntitlementsUpdated() throws IOSAppException {
    /*
     * Not able to use file modified time or file size to
     * verify file got updated. Look for a string that 
     * should have been replaced instead. 
     * (entitlements.plist is a small file)
     */
    try {
        List<String> lines = Files.readAllLines(Paths.get(ENTITLEMENTS_PLIST_PATH), StandardCharsets.UTF_8);
        for (String line : lines) {
            if (line.contains(VO_APP_ID_VALUE)) {
                throw new IOSAppException(ErrorCodeRegistry.getFileUpdateError(ENTITLEMENTS_PLIST_FILENAME));
            }
        }
    } catch (IOException e) {
        LOGGER.error("Error verifying entitlements.plist was updated", e);
    }
}

From source file:org.epics.archiverappliance.config.DefaultConfigService.java

@Override
public void initialize(ServletContext sce) throws ConfigException {
    this.servletContext = sce;
    String contextPath = sce.getContextPath();
    logger.info("DefaultConfigService was created with a servlet context " + contextPath);

    try {
        String pathToVersionTxt = sce.getRealPath("ui/comm/version.txt");
        logger.debug("The full path to the version.txt is " + pathToVersionTxt);
        List<String> lines = Files.readAllLines(Paths.get(pathToVersionTxt), Charset.forName("UTF-8"));
        for (String line : lines) {
            configlogger.info(line);
        }
    } catch (Throwable t) {
        logger.fatal("Unable to determine appliance version", t);
    }

    try {
        // We first try Java system properties for this appliance's identity
        // If a property is not defined, then we check the environment.
        // This gives us the ability to cater to unit tests as well as running using buildAndDeploy scripts without touching the server.xml file.
        // Probably not the most standard way but suited to this need.
        // Finally, we use the local machine's hostname as the myidentity.
        myIdentity = System.getProperty(ARCHAPPL_MYIDENTITY);
        if (myIdentity == null) {
            myIdentity = System.getenv(ARCHAPPL_MYIDENTITY);
            if (myIdentity != null) {
                logger.info("Obtained my identity from environment variable " + myIdentity);
            } else {
                logger.info("Using the local machine's hostname " + myIdentity + " as my identity");
                myIdentity = InetAddress.getLocalHost().getCanonicalHostName();
            }
            if (myIdentity == null) {
                throw new ConfigException("Unable to determine identity of this appliance");
            }
        } else {
            logger.info("Obtained my identity from Java system properties " + myIdentity);
        }

        logger.info("My identity is " + myIdentity);
    } catch (Exception ex) {
        String msg = "Cannot determine this appliance's identity using either the environment variable "
                + ARCHAPPL_MYIDENTITY + " or the java system property " + ARCHAPPL_MYIDENTITY;
        configlogger.fatal(msg);
        throw new ConfigException(msg, ex);
    }
    // Appliances should be local and come straight from persistence.
    try {
        appliances = AppliancesList.loadAppliancesXML(servletContext);
    } catch (Exception ex) {
        throw new ConfigException("Exception loading appliances.xml", ex);
    }

    myApplianceInfo = appliances.get(myIdentity);
    if (myApplianceInfo == null)
        throw new ConfigException("Unable to determine applianceinfo using identity " + myIdentity);
    configlogger.info("My identity is " + myApplianceInfo.getIdentity() + " and my mgmt URL is "
            + myApplianceInfo.getMgmtURL());

    // To make sure we are not starting multiple appliance with the same identity, we make sure that the hostnames match
    try {
        String machineHostName = InetAddress.getLocalHost().getCanonicalHostName();
        String[] myAddrParts = myApplianceInfo.getClusterInetPort().split(":");
        String myHostNameFromInfo = myAddrParts[0];
        if (myHostNameFromInfo.equals("localhost")) {
            logger.debug(
                    "Using localhost for the cluster inet port. If you are indeed running a cluster, the cluster members will not join the cluster.");
        } else if (myHostNameFromInfo.equals(machineHostName)) {
            logger.debug(
                    "Hostname from config and hostname from InetAddress match exactly; we are correctly configured "
                            + machineHostName);
        } else if (InetAddressValidator.getInstance().isValid(myHostNameFromInfo)) {
            logger.debug("Using ipAddress for cluster config " + myHostNameFromInfo);
        } else {
            String msg = "The hostname from appliances.xml is " + myHostNameFromInfo
                    + " and from a call to InetAddress.getLocalHost().getCanonicalHostName() (typially FQDN) is "
                    + machineHostName
                    + ". These are not identical. They are probably equivalent but to prevent multiple appliances binding to the same identity we enforce this equality.";
            configlogger.fatal(msg);
            throw new ConfigException(msg);
        }
    } catch (UnknownHostException ex) {
        configlogger.error(
                "Got an UnknownHostException when trying to determine the hostname. This happens when DNS is not set correctly on this machine (for example, when using VM's. See the documentation for InetAddress.getLocalHost().getCanonicalHostName()");
    }

    try {
        String archApplPropertiesFileName = System.getProperty(ARCHAPPL_PROPERTIES_FILENAME);
        if (archApplPropertiesFileName == null) {
            archApplPropertiesFileName = System.getenv(ARCHAPPL_PROPERTIES_FILENAME);
        }
        if (archApplPropertiesFileName == null) {
            archApplPropertiesFileName = new URL(this.getClass().getClassLoader()
                    .getResource(DEFAULT_ARCHAPPL_PROPERTIES_FILENAME).toString()).getPath();
            configlogger.info(
                    "Loading archappl.properties from the webapp classpath " + archApplPropertiesFileName);
        } else {
            configlogger.info("Loading archappl.properties using the environment/JVM property from "
                    + archApplPropertiesFileName);
        }
        try (InputStream is = new FileInputStream(new File(archApplPropertiesFileName))) {
            archapplproperties.load(is);
            configlogger.info(
                    "Done loading installation specific properties file from " + archApplPropertiesFileName);
        } catch (Exception ex) {
            throw new ConfigException(
                    "Exception loading installation specific properties file " + archApplPropertiesFileName,
                    ex);
        }
    } catch (ConfigException cex) {
        throw cex;
    } catch (Exception ex) {
        configlogger.fatal("Exception loading the appliance properties file", ex);
    }

    switch (contextPath) {
    case "/mgmt":
        warFile = WAR_FILE.MGMT;
        this.mgmtRuntime = new MgmtRuntimeState(this);
        break;
    case "/engine":
        warFile = WAR_FILE.ENGINE;
        this.engineContext = new EngineContext(this);
        break;
    case "/retrieval":
        warFile = WAR_FILE.RETRIEVAL;
        this.retrievalState = new RetrievalState(this);
        break;
    case "/etl":
        this.etlPVLookup = new PBThreeTierETLPVLookup(this);
        warFile = WAR_FILE.ETL;
        break;
    default:
        logger.error("We seem to have introduced a new component into the system " + contextPath);
    }

    String pvName2KeyMappingClass = this.getInstallationProperties()
            .getProperty(ARCHAPPL_PVNAME_TO_KEY_MAPPING_CLASSNAME);
    if (pvName2KeyMappingClass == null || pvName2KeyMappingClass.equals("")
            || pvName2KeyMappingClass.length() < 1) {
        logger.info("Using the default key mapping class");
        pvName2KeyConverter = new ConvertPVNameToKey();
        pvName2KeyConverter.initialize(this);
    } else {
        try {
            logger.info("Using " + pvName2KeyMappingClass + " as the name to key mapping class");
            pvName2KeyConverter = (PVNameToKeyMapping) Class.forName(pvName2KeyMappingClass).newInstance();
            pvName2KeyConverter.initialize(this);
        } catch (Exception ex) {
            logger.fatal("Cannot initialize pv name to key mapping class " + pvName2KeyMappingClass, ex);
            throw new ConfigException(
                    "Cannot initialize pv name to key mapping class " + pvName2KeyMappingClass, ex);
        }
    }

    String runtimeFieldsListStr = this.getInstallationProperties()
            .getProperty("org.epics.archiverappliance.config.RuntimeKeys");
    if (runtimeFieldsListStr != null && !runtimeFieldsListStr.isEmpty()) {
        logger.debug("Got runtime fields from the properties file " + runtimeFieldsListStr);
        String[] runTimeFieldsArr = runtimeFieldsListStr.split(",");
        for (String rf : runTimeFieldsArr) {
            this.runTimeFields.add(rf.trim());
        }
    }

    startupExecutor = Executors.newScheduledThreadPool(1, new ThreadFactory() {
        @Override
        public Thread newThread(Runnable r) {
            Thread t = new Thread(r);
            t.setName("Startup executor");
            return t;
        }
    });

    this.addShutdownHook(new Runnable() {
        @Override
        public void run() {
            logger.info("Shutting down startup scheduled executor...");
            startupExecutor.shutdown();
        }
    });

    this.startupState = STARTUP_SEQUENCE.READY_TO_JOIN_APPLIANCE;
    if (this.warFile == WAR_FILE.MGMT) {
        logger.info("Scheduling webappReady's for the mgmt webapp ");
        MgmtPostStartup mgmtPostStartup = new MgmtPostStartup(this);
        ScheduledFuture<?> postStartupFuture = startupExecutor.scheduleAtFixedRate(mgmtPostStartup, 10, 20,
                TimeUnit.SECONDS);
        mgmtPostStartup.setCancellingFuture(postStartupFuture);
    } else {
        logger.info("Scheduling webappReady's for the non-mgmt webapp " + this.warFile.toString());
        NonMgmtPostStartup nonMgmtPostStartup = new NonMgmtPostStartup(this, this.warFile.toString());
        ScheduledFuture<?> postStartupFuture = startupExecutor.scheduleAtFixedRate(nonMgmtPostStartup, 10, 20,
                TimeUnit.SECONDS);
        nonMgmtPostStartup.setCancellingFuture(postStartupFuture);
    }

    // Measure some JMX metrics once a minute
    startupExecutor.scheduleAtFixedRate(new Runnable() {
        @Override
        public void run() {
            processMetrics.takeMeasurement();
        }
    }, 60, 60, TimeUnit.SECONDS);
}

From source file:org.deeplearning4j.legacyExamples.rnn.SparkLSTMCharacterExample.java

/**
 * Load data from a file, and remove any invalid characters.
 * Data is returned as a single large String
 */
private static String getDataAsString(String filePath) throws IOException {
    List<String> lines = Files.readAllLines(new File(filePath).toPath(), Charset.defaultCharset());
    StringBuilder sb = new StringBuilder();
    for (String line : lines) {
        char[] chars = line.toCharArray();
        for (int i = 0; i < chars.length; i++) {
            if (CHAR_TO_INT.containsKey(chars[i]))
                sb.append(chars[i]);
        }
        sb.append("\n");
    }

    return sb.toString();
}

From source file:edu.ehu.galan.lite.Lite.java

private static void runner(String lang, String resources, List<String> algs, Corpus corpus, String outDir) {
    System.setProperty("net.sf.ehcache.enableShutdownHook", "true");
    if (CacheManager.getCacheManager("ehcacheLitet.xml") == null) {
        CacheManager.create("ehcacheLitet.xml");
    }
    Properties props = new Properties();
    try {
        props.load(new FileInputStream(resources + "lite/configs/general.conf"));
    } catch (IOException ex) {
        System.err.println("Check the resources dir: " + ex.getMessage());
    }
    AbstractDocumentReader parser = null;
    AlgorithmRunner runner = new AlgorithmRunner();
    CValueAlgortithm cvalue = new CValueAlgortithm();
    switch (lang) {
    case "en":
        cvalue.addNewProcessingFilter(new AdjPrepNounFilter());
        parser = new PlainTextDocumentReaderIXAEn();
        break;
    case "es":
        cvalue.addNewProcessingFilter(
                new edu.ehu.galan.lite.algorithms.ranked.unsupervised.cvalue.filters.spanish.NounAdjOpenFilter());
        parser = new PlainTextDocumentReaderIXAEs();
        break;
    }
    runner.submitAlgorithm(cvalue);
    //TODO: do this via java reflection
    for (int i = 0; i < algs.size(); i++) {
        switch (algs.get(i)) {
        case "TFIDF": {
            TFIDFAlgorithm tf = new TFIDFAlgorithm(new CaseStemmer(CaseStemmer.CaseType.lowercase), lang);
            runner.submitAlgorithm(tf);
            break;
        }
        case "FreeLing NER": {
            FreeLingNerAlgorithm alg = null;
            switch (lang) {
            case "en":
                alg = new FreeLingNerAlgorithm(resources + "lite" + File.separator + "configs" + File.separator
                        + "freeling" + File.separator + "enPOSMW.cfg");
                break;
            case "es":
                alg = new FreeLingNerAlgorithm(resources + "lite" + File.separator + "configs" + File.separator
                        + "freeling" + File.separator + "esPOSMW.cfg");
                break;
            }
            runner.submitAlgorithm(alg);
            break;
        }
        case "KP-Miner": {
            if (lang.equals("en")) {
                KPMinerAlgorithm kp = new KPMinerAlgorithm();
                runner.submitAlgorithm(kp);
            }
            break;
        }
        case "Shallow Parsing Grammar": {
            if (lang.equals("en")) {
                ShallowParsingGrammarAlgortithm a = new ShallowParsingGrammarAlgortithm(resources + "lite"
                        + File.separator + "grammars" + File.separator + "Cg2EnGrammar.grammar",
                        props.getProperty("tmpDir") + File.separator + "cg3");
                runner.submitAlgorithm(a);
            }
            break;
        }
        case "RAKE": {
            RakeAlgorithm ex = new RakeAlgorithm();
            switch (lang) {
            case "en":
                ex.loadStopWordsList(resources + "lite/stopWordLists/RakeStopLists/SmartStopListEn");
                break;
            case "es":
                ex.loadStopWordsList(resources + "lite/stopWordLists/RakeStopLists/SpanishCustomEs");
                break;
            }
            ex.loadPunctStopWord(resources + "lite/stopWordLists/RakeStopLists/RakePunctDefaultStopList");
            runner.submitAlgorithm(ex);
            break;
        }
        }
    }
    //load stop list
    List<String> standardStop = null;
    try {
        standardStop = Files.readAllLines(Paths.get(resources + "lite/stopWordLists/standardStopList"),
                StandardCharsets.UTF_8);

    } catch (IOException e1x) {
        System.err.println("Check your resources dir: " + e1x.getMessage());
    }
    WikiminnerHelper helper = WikiminnerHelper.getInstance(resources);
    helper.setLanguage(lang);
    // We may operate in local mode (using Wikiminer as an API instead of interacting via the REST API)
    helper.setLocalMode(props.getProperty("localMode").equals("true"),
            "/home/angel/nfs/wikiminer/configs/wikipedia");
    WikiMinerMap wikimapping = new WikiMinerMap(resources, helper);
    CValueWikiDisambiguator disambiguator = new CValueWikiDisambiguator(resources, helper);
    CValueWikiRelationship relate = new CValueWikiRelationship(resources, helper);
    WikipediaData data = new WikipediaData(resources, helper);
    if (!(props.getProperty("localMode")).equals("true")) {
        helper.openConnection();
    }
    helper.openConnection();
    while (!corpus.getDocQueue().isEmpty()) {
        Document doc = corpus.getDocQueue().poll();
        doc.setSource(Document.SourceType.wikipedia);
        parser.readSource(doc.getPath());
        doc.setSentenceList(parser.getSentenceList());
        doc.setTokenList(parser.getTokenizedSentenceList());
        System.out.println(doc.getName());
        runner.runAlgorihms(doc, resources);
        doc.applyGlobalStopWordList(standardStop);
        doc.mapThreshold(1.9f, new String[] { "CValue" });
        doc.mapThreshold(0.00034554f, new String[] { "TFIDF" });
        doc.removeAndMixTerms();
        //map document
        wikimapping.mapCorpus(doc);
        disambiguator.disambiguateTopics(doc);
        //we may disambiguate topics that do not disambiguated correctly
        DuplicateRemoval.disambiguationRemoval(doc);
        DuplicateRemoval.topicDuplicateRemoval(doc);
        //obtain the wiki links,labels, etc
        data.processDocument(doc);
        //measure domain relatedness
        relate.relate(doc);
        //save the results
        Document.saveJsonToDir(outDir, doc);
    }
    if (props.getProperty("localMode").equals("true")) {
        helper.closeWikipedia();
    } else {
        helper.closeConnection();
    }
    CacheManager.getInstance().shutdown();
    System.exit(0);
}

From source file:org.jboss.as.test.integration.logging.formatters.XmlFormatterTestCase.java

@Test
public void testDateFormat() throws Exception {
    configure(Collections.emptyMap(), Collections.emptyMap(), false);

    final String dateFormat = "yyyy-MM-dd'T'HH:mm:ssSSSZ";
    final String timezone = "GMT";

    // Change the date format and time zone
    final CompositeOperationBuilder builder = CompositeOperationBuilder.create();
    builder.addStep(Operations.createWriteAttributeOperation(FORMATTER_ADDRESS, "date-format", dateFormat));
    builder.addStep(Operations.createWriteAttributeOperation(FORMATTER_ADDRESS, "zone-id", timezone));
    executeOperation(builder.build());

    final String msg = "Logging test: XmlFormatterTestCase.testNoExceptions";
    int statusCode = getResponse(msg,
            Collections.singletonMap(LoggingServiceActivator.LOG_EXCEPTION_KEY, "false"));
    Assert.assertEquals("Invalid response statusCode: " + statusCode, statusCode, HttpStatus.SC_OK);

    final List<String> expectedKeys = createDefaultKeys();

    final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
    final DocumentBuilder documentBuilder = factory.newDocumentBuilder();

    for (String s : Files.readAllLines(logFile, StandardCharsets.UTF_8)) {
        if (s.trim().isEmpty())
            continue;
        final Document doc = documentBuilder.parse(new InputSource(new StringReader(s)));

        validateDefault(doc, expectedKeys, msg);
        validateStackTrace(doc, false, false);

        // Validate the date format is correct. We don't want to validate the specific date, only that it's
        // parsable.
        final NodeList timestampNode = doc.getElementsByTagName("timestamp");
        Assert.assertEquals(1, timestampNode.getLength());
        final String xmlDate = timestampNode.item(0).getTextContent();
        // If the date is not parsable an exception should be thrown
        try {
            DateTimeFormatter.ofPattern(dateFormat, Locale.ROOT).withZone(ZoneId.of(timezone)).parse(xmlDate);
        } catch (Exception e) {
            Assert.fail(String.format("Failed to parse %s with pattern %s and zone %s: %s", xmlDate, dateFormat,
                    timezone, e.getMessage()));
        }
    }
}

From source file:com.shazam.dataengineering.pipelinebuilder.DeploymentActionTest.java

@Test
public void writingReportShouldCreateJsonFile() throws Exception {
    DeploymentAction action = new DeploymentAction(getMockAbstractBuild(), new HashMap<S3Environment, String>(),
            new AnonymousAWSCredentials());

    Date date = new Date();

    Method method = action.getClass().getDeclaredMethod("writeReport", Date.class, String.class, Boolean.TYPE);
    method.setAccessible(true);

    method.invoke(action, date, "test-1234", true);

    File logFile = new File(testFolder.getRoot(), "deployment.log");
    assertTrue(logFile.exists());

    List<String> jsonContent = Files.readAllLines(logFile.toPath(), Charset.defaultCharset());
    assertEquals(1, jsonContent.size());

    JSONParser jsonParser = new JSONParser();
    JSONObject log = (JSONObject) jsonParser.parse(jsonContent.get(0));
    JSONArray deployments = (JSONArray) log.get("deployments");
    JSONObject deployment = (JSONObject) deployments.get(0);

    assertEquals(String.valueOf(date.getTime()), deployment.get("date").toString());
    assertEquals("SYSTEM", deployment.get("username").toString());
    assertEquals("true", deployment.get("status").toString());
    assertEquals("test-1234", deployment.get("pipelineId"));
}