List of usage examples for java.io.File.getPath()
public String getPath()
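For context (paraphrasing the JDK javadoc): getPath() converts the abstract pathname into a pathname string, returning the path the File was constructed from (with separators normalized to the platform default) without consulting the file system. Below is a minimal sketch contrasting it with getAbsolutePath() and getCanonicalPath(); the file name is purely illustrative.

import java.io.File;
import java.io.IOException;

public class GetPathDemo {
    public static void main(String[] args) throws IOException {
        File relative = new File("conf/app.properties");   // illustrative relative path
        // getPath() echoes the constructor argument (separators normalized),
        // e.g. "conf/app.properties" on Unix-like systems; the file need not exist.
        System.out.println(relative.getPath());
        // getAbsolutePath() resolves the path against the current working directory (user.dir).
        System.out.println(relative.getAbsolutePath());
        // getCanonicalPath() additionally resolves "." , ".." and symbolic links,
        // which is why it can throw IOException.
        System.out.println(relative.getCanonicalPath());
    }
}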
From source file:net.sourceforge.msscodefactory.cfasterisk.v2_4.CFAsteriskSaxOracleLoaderCLI.CFAsteriskSaxOracleLoaderCLI.java
public static void main(String args[]) { final String S_ProcName = "CFAsteriskSaxOracleLoaderCLI.main() "; initConsoleLog();// w w w .jav a2s .c om int numArgs = args.length; if (numArgs >= 2) { String homeDirName = System.getProperty("HOME"); if (homeDirName == null) { homeDirName = System.getProperty("user.home"); if (homeDirName == null) { log.message(S_ProcName + "ERROR: Home directory not set"); return; } } File homeDir = new File(homeDirName); if (!homeDir.exists()) { log.message(S_ProcName + "ERROR: Home directory \"" + homeDirName + "\" does not exist"); return; } if (!homeDir.isDirectory()) { log.message(S_ProcName + "ERROR: Home directory \"" + homeDirName + "\" is not a directory"); return; } CFAsteriskConfigurationFile cFAsteriskConfig = new CFAsteriskConfigurationFile(); String cFAsteriskConfigFileName = homeDir.getPath() + File.separator + ".cfasteriskoraclerc"; cFAsteriskConfig.setFileName(cFAsteriskConfigFileName); File cFAsteriskConfigFile = new File(cFAsteriskConfigFileName); if (!cFAsteriskConfigFile.exists()) { cFAsteriskConfig.setDbServer("127.0.0.1"); cFAsteriskConfig.setDbPort(1526); cFAsteriskConfig.setDbDatabase("CFAst24"); cFAsteriskConfig.setDbUserName("system"); cFAsteriskConfig.setDbPassword("edit-me-please"); cFAsteriskConfig.save(); log.message(S_ProcName + "INFO: Created configuration file " + cFAsteriskConfigFileName + ", please edit configuration and restart."); return; } if (!cFAsteriskConfigFile.isFile()) { log.message(S_ProcName + "ERROR: Proposed configuration file " + cFAsteriskConfigFileName + " is not a file."); return; } if (!cFAsteriskConfigFile.canRead()) { log.message(S_ProcName + "ERROR: Permission denied attempting to read configuration file " + cFAsteriskConfigFileName); return; } cFAsteriskConfig.load(); boolean fastExit = false; CFAsteriskClientConfigurationFile cFDbTestClientConfig = new CFAsteriskClientConfigurationFile(); String cFDbTestClientConfigFileName = homeDir.getPath() + File.separator + ".cfdbtestclientrc"; cFDbTestClientConfig.setFileName(cFDbTestClientConfigFileName); File cFDbTestClientConfigFile = new File(cFDbTestClientConfigFileName); if (!cFDbTestClientConfigFile.exists()) { String cFDbTestKeyStoreFileName = homeDir.getPath() + File.separator + ".msscfjceks"; cFDbTestClientConfig.setKeyStore(cFDbTestKeyStoreFileName); InetAddress localHost; try { localHost = InetAddress.getLocalHost(); } catch (UnknownHostException e) { localHost = null; } if (localHost == null) { log.message(S_ProcName + "ERROR: LocalHost is null"); return; } String hostName = localHost.getHostName(); if ((hostName == null) || (hostName.length() <= 0)) { log.message("ERROR: LocalHost.HostName is null or empty"); return; } String userName = System.getProperty("user.name"); if ((userName == null) || (userName.length() <= 0)) { log.message("ERROR: user.name is null or empty"); return; } String deviceName = hostName.replaceAll("[^\\w]", "_").toLowerCase() + "-" + userName.replaceAll("[^\\w]", "_").toLowerCase(); cFDbTestClientConfig.setDeviceName(deviceName); cFDbTestClientConfig.save(); log.message(S_ProcName + "INFO: Created CFAsterisk client configuration file " + cFDbTestClientConfigFileName); fastExit = true; } if (!cFDbTestClientConfigFile.isFile()) { log.message(S_ProcName + "ERROR: Proposed client configuration file " + cFDbTestClientConfigFileName + " is not a file."); fastExit = true; } if (!cFDbTestClientConfigFile.canRead()) { log.message(S_ProcName + "ERROR: Permission denied attempting to read client configuration file " + 
cFDbTestClientConfigFileName); fastExit = true; } cFDbTestClientConfig.load(); if (fastExit) { return; } // Configure logging Properties sysProps = System.getProperties(); sysProps.setProperty("log4j.rootCategory", "WARN"); sysProps.setProperty("org.apache.commons.logging.Log", "org.apache.commons.logging.impl.Log4JLogger"); Logger httpLogger = Logger.getLogger("org.apache.http"); httpLogger.setLevel(Level.WARN); ICFAsteriskSchema cFAsteriskSchema = new CFAsteriskOracleSchema(); cFAsteriskSchema.setConfigurationFile(cFAsteriskConfig); ICFAsteriskSchemaObj cFAsteriskSchemaObj = new CFAsteriskSchemaObj(); cFAsteriskSchemaObj.setBackingStore(cFAsteriskSchema); CFAsteriskSaxLoaderCLI cli = new CFAsteriskSaxOracleLoaderCLI(); CFAsteriskSaxLoader loader = cli.getSaxLoader(); loader.setSchemaObj(cFAsteriskSchemaObj); cFAsteriskSchema.connect(); String url = args[1]; if (numArgs >= 5) { cli.setClusterName(args[2]); cli.setTenantName(args[3]); cli.setSecUserName(args[4]); } else { cli.setClusterName("default"); cli.setTenantName("system"); cli.setSecUserName("system"); } loader.setUseCluster(cli.getClusterObj()); loader.setUseTenant(cli.getTenantObj()); try { cFAsteriskSchema.beginTransaction(); cFAsteriskSchemaObj.setSecCluster(cli.getClusterObj()); cFAsteriskSchemaObj.setSecTenant(cli.getTenantObj()); cFAsteriskSchemaObj.setSecUser(cli.getSecUserObj()); cFAsteriskSchemaObj.setSecSession(cli.getSecSessionObj()); CFSecurityAuthorization auth = new CFSecurityAuthorization(); auth.setSecCluster(cFAsteriskSchemaObj.getSecCluster()); auth.setSecTenant(cFAsteriskSchemaObj.getSecTenant()); auth.setSecSession(cFAsteriskSchemaObj.getSecSession()); cFAsteriskSchemaObj.setAuthorization(auth); applyLoaderOptions(loader, args[0]); if (numArgs >= 5) { cli.evaluateRemainingArgs(args, 5); } else { cli.evaluateRemainingArgs(args, 2); } loader.parseFile(url); cFAsteriskSchema.commit(); cFAsteriskSchema.disconnect(true); } catch (Exception e) { log.message(S_ProcName + "EXCEPTION: Could not parse XML file \"" + url + "\": " + e.getMessage()); e.printStackTrace(System.out); } catch (Error e) { log.message(S_ProcName + "ERROR: Could not parse XML file \"" + url + "\": " + e.getMessage()); e.printStackTrace(System.out); } finally { if (cFAsteriskSchema.isConnected()) { cFAsteriskSchema.rollback(); cFAsteriskSchema.disconnect(false); } } } else { log.message(S_ProcName + "ERROR: Expected at least two argument specifying the loader options and the name of the XML file to parse. The first argument may be empty."); } }
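The three loader CLIs in this group (Oracle above, Sybase and PgSql below) share one getPath() pattern: build a dotfile path under the user's home directory as homeDir.getPath() + File.separator + name, then validate it before loading. A condensed sketch of that pattern follows; the dotfile name is taken from the Oracle variant and the logging is simplified to System.out/err.

import java.io.File;

public class HomeConfigLocator {
    public static void main(String[] args) {
        String homeDirName = System.getProperty("HOME");
        if (homeDirName == null) {
            homeDirName = System.getProperty("user.home");
        }
        if (homeDirName == null) {
            System.err.println("Home directory not set");
            return;
        }
        File homeDir = new File(homeDirName);
        if (!homeDir.isDirectory()) {
            System.err.println("Home directory \"" + homeDirName + "\" is not a directory");
            return;
        }
        // getPath() returns the home directory exactly as the property supplied it,
        // so the configuration file lands directly underneath it.
        String configFileName = homeDir.getPath() + File.separator + ".cfasteriskoraclerc";
        File configFile = new File(configFileName);
        if (!configFile.exists()) {
            System.out.println("Would create a default configuration at " + configFileName);
        } else if (!configFile.isFile() || !configFile.canRead()) {
            System.err.println("Cannot read configuration file " + configFileName);
        }
    }
}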
From source file:net.sourceforge.msscodefactory.cfasterisk.v2_4.CFAsteriskSaxSybaseLoaderCLI.CFAsteriskSaxSybaseLoaderCLI.java
public static void main(String args[]) { final String S_ProcName = "CFAsteriskSaxSybaseLoaderCLI.main() "; initConsoleLog();/*from w ww . j av a 2s . co m*/ int numArgs = args.length; if (numArgs >= 2) { String homeDirName = System.getProperty("HOME"); if (homeDirName == null) { homeDirName = System.getProperty("user.home"); if (homeDirName == null) { log.message(S_ProcName + "ERROR: Home directory not set"); return; } } File homeDir = new File(homeDirName); if (!homeDir.exists()) { log.message(S_ProcName + "ERROR: Home directory \"" + homeDirName + "\" does not exist"); return; } if (!homeDir.isDirectory()) { log.message(S_ProcName + "ERROR: Home directory \"" + homeDirName + "\" is not a directory"); return; } CFAsteriskConfigurationFile cFAsteriskConfig = new CFAsteriskConfigurationFile(); String cFAsteriskConfigFileName = homeDir.getPath() + File.separator + ".cfasterisksybaserc"; cFAsteriskConfig.setFileName(cFAsteriskConfigFileName); File cFAsteriskConfigFile = new File(cFAsteriskConfigFileName); if (!cFAsteriskConfigFile.exists()) { cFAsteriskConfig.setDbServer("localhost"); cFAsteriskConfig.setDbPort(2345); cFAsteriskConfig.setDbDatabase("CFAst24"); cFAsteriskConfig.setDbUserName("sa"); cFAsteriskConfig.setDbPassword("edit-me-please"); cFAsteriskConfig.save(); log.message(S_ProcName + "INFO: Created configuration file " + cFAsteriskConfigFileName + ", please edit configuration and restart."); return; } if (!cFAsteriskConfigFile.isFile()) { log.message(S_ProcName + "ERROR: Proposed configuration file " + cFAsteriskConfigFileName + " is not a file."); return; } if (!cFAsteriskConfigFile.canRead()) { log.message(S_ProcName + "ERROR: Permission denied attempting to read configuration file " + cFAsteriskConfigFileName); return; } cFAsteriskConfig.load(); boolean fastExit = false; CFAsteriskClientConfigurationFile cFDbTestClientConfig = new CFAsteriskClientConfigurationFile(); String cFDbTestClientConfigFileName = homeDir.getPath() + File.separator + ".cfdbtestclientrc"; cFDbTestClientConfig.setFileName(cFDbTestClientConfigFileName); File cFDbTestClientConfigFile = new File(cFDbTestClientConfigFileName); if (!cFDbTestClientConfigFile.exists()) { String cFDbTestKeyStoreFileName = homeDir.getPath() + File.separator + ".msscfjceks"; cFDbTestClientConfig.setKeyStore(cFDbTestKeyStoreFileName); InetAddress localHost; try { localHost = InetAddress.getLocalHost(); } catch (UnknownHostException e) { localHost = null; } if (localHost == null) { log.message(S_ProcName + "ERROR: LocalHost is null"); return; } String hostName = localHost.getHostName(); if ((hostName == null) || (hostName.length() <= 0)) { log.message("ERROR: LocalHost.HostName is null or empty"); return; } String userName = System.getProperty("user.name"); if ((userName == null) || (userName.length() <= 0)) { log.message("ERROR: user.name is null or empty"); return; } String deviceName = hostName.replaceAll("[^\\w]", "_").toLowerCase() + "-" + userName.replaceAll("[^\\w]", "_").toLowerCase(); cFDbTestClientConfig.setDeviceName(deviceName); cFDbTestClientConfig.save(); log.message(S_ProcName + "INFO: Created CFAsterisk client configuration file " + cFDbTestClientConfigFileName); fastExit = true; } if (!cFDbTestClientConfigFile.isFile()) { log.message(S_ProcName + "ERROR: Proposed client configuration file " + cFDbTestClientConfigFileName + " is not a file."); fastExit = true; } if (!cFDbTestClientConfigFile.canRead()) { log.message(S_ProcName + "ERROR: Permission denied attempting to read client configuration file " + 
cFDbTestClientConfigFileName); fastExit = true; } cFDbTestClientConfig.load(); if (fastExit) { return; } // Configure logging Properties sysProps = System.getProperties(); sysProps.setProperty("log4j.rootCategory", "WARN"); sysProps.setProperty("org.apache.commons.logging.Log", "org.apache.commons.logging.impl.Log4JLogger"); Logger httpLogger = Logger.getLogger("org.apache.http"); httpLogger.setLevel(Level.WARN); ICFAsteriskSchema cFAsteriskSchema = new CFAsteriskSybaseSchema(); cFAsteriskSchema.setConfigurationFile(cFAsteriskConfig); ICFAsteriskSchemaObj cFAsteriskSchemaObj = new CFAsteriskSchemaObj(); cFAsteriskSchemaObj.setBackingStore(cFAsteriskSchema); CFAsteriskSaxLoaderCLI cli = new CFAsteriskSaxSybaseLoaderCLI(); CFAsteriskSaxLoader loader = cli.getSaxLoader(); loader.setSchemaObj(cFAsteriskSchemaObj); cFAsteriskSchema.connect(); String url = args[1]; if (numArgs >= 5) { cli.setClusterName(args[2]); cli.setTenantName(args[3]); cli.setSecUserName(args[4]); } else { cli.setClusterName("default"); cli.setTenantName("system"); cli.setSecUserName("system"); } loader.setUseCluster(cli.getClusterObj()); loader.setUseTenant(cli.getTenantObj()); try { cFAsteriskSchema.beginTransaction(); cFAsteriskSchemaObj.setSecCluster(cli.getClusterObj()); cFAsteriskSchemaObj.setSecTenant(cli.getTenantObj()); cFAsteriskSchemaObj.setSecUser(cli.getSecUserObj()); cFAsteriskSchemaObj.setSecSession(cli.getSecSessionObj()); CFSecurityAuthorization auth = new CFSecurityAuthorization(); auth.setSecCluster(cFAsteriskSchemaObj.getSecCluster()); auth.setSecTenant(cFAsteriskSchemaObj.getSecTenant()); auth.setSecSession(cFAsteriskSchemaObj.getSecSession()); cFAsteriskSchemaObj.setAuthorization(auth); applyLoaderOptions(loader, args[0]); if (numArgs >= 5) { cli.evaluateRemainingArgs(args, 5); } else { cli.evaluateRemainingArgs(args, 2); } loader.parseFile(url); cFAsteriskSchema.commit(); cFAsteriskSchema.disconnect(true); } catch (Exception e) { log.message(S_ProcName + "EXCEPTION: Could not parse XML file \"" + url + "\": " + e.getMessage()); e.printStackTrace(System.out); } catch (Error e) { log.message(S_ProcName + "ERROR: Could not parse XML file \"" + url + "\": " + e.getMessage()); e.printStackTrace(System.out); } finally { if (cFAsteriskSchema.isConnected()) { cFAsteriskSchema.rollback(); cFAsteriskSchema.disconnect(false); } } } else { log.message(S_ProcName + "ERROR: Expected at least two argument specifying the loader options and the name of the XML file to parse. The first argument may be empty."); } }
From source file:net.sourceforge.msscodefactory.cfasterisk.v2_4.CFAsteriskSaxPgSqlLoaderCLI.CFAsteriskSaxPgSqlLoaderCLI.java
public static void main(String args[]) { final String S_ProcName = "CFAsteriskSaxPgSqlLoaderCLI.main() "; initConsoleLog();/*from www.jav a 2 s .c om*/ int numArgs = args.length; if (numArgs >= 2) { String homeDirName = System.getProperty("HOME"); if (homeDirName == null) { homeDirName = System.getProperty("user.home"); if (homeDirName == null) { log.message(S_ProcName + "ERROR: Home directory not set"); return; } } File homeDir = new File(homeDirName); if (!homeDir.exists()) { log.message(S_ProcName + "ERROR: Home directory \"" + homeDirName + "\" does not exist"); return; } if (!homeDir.isDirectory()) { log.message(S_ProcName + "ERROR: Home directory \"" + homeDirName + "\" is not a directory"); return; } CFAsteriskConfigurationFile cFAsteriskConfig = new CFAsteriskConfigurationFile(); String cFAsteriskConfigFileName = homeDir.getPath() + File.separator + ".cfasteriskpgsqlrc"; cFAsteriskConfig.setFileName(cFAsteriskConfigFileName); File cFAsteriskConfigFile = new File(cFAsteriskConfigFileName); if (!cFAsteriskConfigFile.exists()) { cFAsteriskConfig.setDbServer("127.0.0.1"); cFAsteriskConfig.setDbPort(5432); cFAsteriskConfig.setDbDatabase("CFAst24"); cFAsteriskConfig.setDbUserName("postgres"); cFAsteriskConfig.setDbPassword("edit-me-please"); cFAsteriskConfig.save(); log.message(S_ProcName + "INFO: Created configuration file " + cFAsteriskConfigFileName + ", please edit configuration and restart."); return; } if (!cFAsteriskConfigFile.isFile()) { log.message(S_ProcName + "ERROR: Proposed configuration file " + cFAsteriskConfigFileName + " is not a file."); return; } if (!cFAsteriskConfigFile.canRead()) { log.message(S_ProcName + "ERROR: Permission denied attempting to read configuration file " + cFAsteriskConfigFileName); return; } cFAsteriskConfig.load(); boolean fastExit = false; CFAsteriskClientConfigurationFile cFDbTestClientConfig = new CFAsteriskClientConfigurationFile(); String cFDbTestClientConfigFileName = homeDir.getPath() + File.separator + ".cfdbtestclientrc"; cFDbTestClientConfig.setFileName(cFDbTestClientConfigFileName); File cFDbTestClientConfigFile = new File(cFDbTestClientConfigFileName); if (!cFDbTestClientConfigFile.exists()) { String cFDbTestKeyStoreFileName = homeDir.getPath() + File.separator + ".msscfjceks"; cFDbTestClientConfig.setKeyStore(cFDbTestKeyStoreFileName); InetAddress localHost; try { localHost = InetAddress.getLocalHost(); } catch (UnknownHostException e) { localHost = null; } if (localHost == null) { log.message(S_ProcName + "ERROR: LocalHost is null"); return; } String hostName = localHost.getHostName(); if ((hostName == null) || (hostName.length() <= 0)) { log.message("ERROR: LocalHost.HostName is null or empty"); return; } String userName = System.getProperty("user.name"); if ((userName == null) || (userName.length() <= 0)) { log.message("ERROR: user.name is null or empty"); return; } String deviceName = hostName.replaceAll("[^\\w]", "_").toLowerCase() + "-" + userName.replaceAll("[^\\w]", "_").toLowerCase(); cFDbTestClientConfig.setDeviceName(deviceName); cFDbTestClientConfig.save(); log.message(S_ProcName + "INFO: Created CFAsterisk client configuration file " + cFDbTestClientConfigFileName); fastExit = true; } if (!cFDbTestClientConfigFile.isFile()) { log.message(S_ProcName + "ERROR: Proposed client configuration file " + cFDbTestClientConfigFileName + " is not a file."); fastExit = true; } if (!cFDbTestClientConfigFile.canRead()) { log.message(S_ProcName + "ERROR: Permission denied attempting to read client configuration file " + 
cFDbTestClientConfigFileName); fastExit = true; } cFDbTestClientConfig.load(); if (fastExit) { return; } // Configure logging Properties sysProps = System.getProperties(); sysProps.setProperty("log4j.rootCategory", "WARN"); sysProps.setProperty("org.apache.commons.logging.Log", "org.apache.commons.logging.impl.Log4JLogger"); Logger httpLogger = Logger.getLogger("org.apache.http"); httpLogger.setLevel(Level.WARN); ICFAsteriskSchema cFAsteriskSchema = new CFAsteriskPgSqlSchema(); cFAsteriskSchema.setConfigurationFile(cFAsteriskConfig); ICFAsteriskSchemaObj cFAsteriskSchemaObj = new CFAsteriskSchemaObj(); cFAsteriskSchemaObj.setBackingStore(cFAsteriskSchema); CFAsteriskSaxLoaderCLI cli = new CFAsteriskSaxPgSqlLoaderCLI(); CFAsteriskSaxLoader loader = cli.getSaxLoader(); loader.setSchemaObj(cFAsteriskSchemaObj); cFAsteriskSchema.connect(); String url = args[1]; if (numArgs >= 5) { cli.setClusterName(args[2]); cli.setTenantName(args[3]); cli.setSecUserName(args[4]); } else { cli.setClusterName("default"); cli.setTenantName("system"); cli.setSecUserName("system"); } loader.setUseCluster(cli.getClusterObj()); loader.setUseTenant(cli.getTenantObj()); try { cFAsteriskSchema.beginTransaction(); cFAsteriskSchemaObj.setSecCluster(cli.getClusterObj()); cFAsteriskSchemaObj.setSecTenant(cli.getTenantObj()); cFAsteriskSchemaObj.setSecUser(cli.getSecUserObj()); cFAsteriskSchemaObj.setSecSession(cli.getSecSessionObj()); CFSecurityAuthorization auth = new CFSecurityAuthorization(); auth.setSecCluster(cFAsteriskSchemaObj.getSecCluster()); auth.setSecTenant(cFAsteriskSchemaObj.getSecTenant()); auth.setSecSession(cFAsteriskSchemaObj.getSecSession()); cFAsteriskSchemaObj.setAuthorization(auth); applyLoaderOptions(loader, args[0]); if (numArgs >= 5) { cli.evaluateRemainingArgs(args, 5); } else { cli.evaluateRemainingArgs(args, 2); } loader.parseFile(url); cFAsteriskSchema.commit(); cFAsteriskSchema.disconnect(true); } catch (Exception e) { log.message(S_ProcName + "EXCEPTION: Could not parse XML file \"" + url + "\": " + e.getMessage()); e.printStackTrace(System.out); } catch (Error e) { log.message(S_ProcName + "ERROR: Could not parse XML file \"" + url + "\": " + e.getMessage()); e.printStackTrace(System.out); } finally { if (cFAsteriskSchema.isConnected()) { cFAsteriskSchema.rollback(); cFAsteriskSchema.disconnect(false); } } } else { log.message(S_ProcName + "ERROR: Expected at least two argument specifying the loader options and the name of the XML file to parse. The first argument may be empty."); } }
From source file:main.Driver.java
/** * The path to a properties file which will supply parameter values for the tests should be passed in as argument 0 to main. * The test that will be run is determined by the value of 'test_type' in the properties file, and each of the tests have their own properties: * 'encode+decode' - Encode and decode the given leadsheet with the autoencoder, writing the result to a leadsheet file. * Params: // w w w . j a v a 2 s . c om * * autoencoder_connectome={the path to the connectome which the autoencoder will be loaded with} * * name_generator_connectome={the path to the connectome which the name generator will be loaded with} * * input_leadsheet={the path to the leadsheet file which will be encoded and decoded} * * output_folder={the path to the output folder which the result leadsheet file will be written in} * * 'encode+write_queue' - Encode the given leadsheet with the autoencoder, then write the encoded feature queue to a queue file. * Params: * * autoencoder_connectome={the path to the connectome which the autoencoder will be loaded with} * * input_leadsheet={the path to the leadsheet file which will be encoded} * * queue_folder={the path to the output folder which the result queue file will be written in} * * 'encode+write_queue+decode' - Encode the given leadsheet with the autoencoder, write the encoded feature queue to a queue file, and then write the result leadsheet to a leadsheet file. * * autoencoder_connectome={the path to the connectome which the autoencoder will be loaded with} * * name_generator_connectome={the path to the connectome which the name generator will be loaded with} * * input_leadsheet={the path to the leadsheet file which will be encoded and decoded} * * queue_folder={the path to the output folder which the result queue file will be written in} * * output_folder={the path to the output folder which the result leadsheet file will be written in} * 'create_feature_property_vector' - Given a corpus folder of leadsheets, construct a vector consisting of property analysis values for each feature in the corpus data * * input_corpus_folder={the path to the corpus folder containing all leadsheets to analyze} * * feature_size={the size (in time steps) of each feature} * * feature_properties_path={the path to write the generated vector file to (the file will be a csv file containing all the values in left-to-right order} * * feature_property={the type of feature property to analyze - current options are 'rest', 'sustain', articulate' (these return ratios of time steps with the given property to the total time steps in the feature). 
* 'compile_feature_queue_matrix' - Given a corpus folder of feature queues, construct a matrix of all feature vectors and write it as a csv file * * queue_folder={the path to the folder containing all queue files to compile} * * feature_matrix_path={the path to write the result csv file to} * 'generate_from_feature_queue_matrix' - Given a matrix of feature vectors, load the autoencoder with a queue of those features and decode from it, writing the result leadsheet to a file * * autoencoder_connectome={the path to the connectome which the autoencoder will be loaded with} * * reference_leadsheet={the path to the leadsheet we will take the chord sequence from (and loop it to match the length of the feature queue)} * * feature_queue_matrix_path={the path to the feature queue matrix file we will decode from} * * output_file_path={the path to the file we will write our result leadsheet to} * * (optional) song_title={the song title to write in the leadsheet file - by default this is "Generation from Feature Matrix {path of the feature matrix}"} * * feature_size={the size (in time steps) of features} * 'population_trade' - Given a leadsheet file, split it into sections of a specified size, and between sections, generate a response that plays off of a population of previously encoded feature queues * * autoencoder_connectome={the path to the connectome which the autoencoder will be loaded with} * * input_leadsheet={the path to the leadsheet file which will be encoded and traded with} * * output_folder={the path to the output folder which the result leadsheet file will be written in} * * trading_part_size={the size (in time steps) of each trading part. The input leadsheet will be split into sections of this size, and trading responses will be generated in between.} * * interpolation_variance={a random value between zero and this will be added to the interpolation_min at each trading section to calculate the interpolation of the recently encoded queue towards the queue population before decoding the trading response} * * interpolation_min={the minimum ratio of interpolation at each trading section} * * herding_strength={the maximum strength of the herding operation at each section (all queues in the population are interpolated a random amount towards the most recent queue)} * * mutation_strength={the maximum strength of mutation at each section (each element of the feature vectors of all queues in the population are mutated at a random strength} * * crossover_strength{the maximum strength of crossover at each section (there is a chance for every queue that the queue will swap a random feature of itself with the corresponding feature of another random queue)} * 'interpolation' - Given a leadsheet file and a reference queue file, encode the leadsheet file with the autoencoder, and generate from the encoded queue for a number of divisions of a full interpolation towards the target queue * * autoencoder_connectome={the path to the connectome which the autoencoder will be loaded with} * * input_leadsheet={the path to the leadsheet file which will be encoded and interpolated} * * target_queue={the path to the queue to interpolate towards at each interpolation value}; * * output_folder={the path to the output folder which the result leadsheet file will be written in} * * num_interpolation_divisions={the number of divisions of the interpolation strength from 0.0 to 1.0 (the length of the result leadsheet will be equal to the length of the original times 1 + number of divisions, as the first section of the 
result leadsheet is for interpolation 0.0)} * 'frankenstein' - Given a primary queue, a reference leadsheet for chords, and a corpus of queue files, construct the result leadsheet from a series of randomly weighted interpolations of the primary queue towards the set of selected queues. * * autoencoder_connectome={the path to the connectome which the autoencoder will be loaded with} * * primary_queue_path={the path to the queue which will serve as the base for all of the queue combinations (which are the result of sequential interpolations instead of a weighted sum)} * * reference_leadsheet={the path to the leadsheet we will take the chord sequence from (and loop it to match the desired length of our output} * * queue_folder={the path to the folder containing all queue files we can select from} * * output_file_path={the path to the file we will write our result leadsheet to} * * num_reference_queues={the number of reference queues we will pick at random from the queue folder to sample from) * * num_combinations={the number of queue combinations to sample and create the result leadsheet from} * * interpolation_strength={the total magnitude of all interpolation operations for each combination} */ public static void main(String[] args) throws FileNotFoundException, IOException, ConfigurationException { FileBasedConfigurationBuilder<PropertiesConfiguration> builder = new FileBasedConfigurationBuilder<>( PropertiesConfiguration.class).configure( new Parameters().properties().setFileName(args[0]).setThrowExceptionOnMissing(true) .setListDelimiterHandler(new DefaultListDelimiterHandler(';')) .setIncludesAllowed(false)); Configuration config = builder.getConfiguration(); LogTimer.initStartTime(); //start our logging timer to keep track of our execution time //switch statement to run the appropriate test switch (config.getString("test_type")) { case "encode+decode": { //load parameter values from config file String autoencoderConnectomePath = config.getString("autoencoder_connectome"); String nameGeneratorConnectomePath = config.getString("name_generator_connectome"); String inputLeadsheetPath = config.getString("input_leadsheet"); String outputFolderPath = config.getString("output_folder"); //initialize networks NameGenerator nameGenerator = initializeNameGenerator(nameGeneratorConnectomePath); ProductCompressingAutoencoder autoencoder = initializeAutoencoder(autoencoderConnectomePath, false); //initialize input sequences and output sequence LeadsheetDataSequence inputSequence = leadsheetToSequence(inputLeadsheetPath); LeadsheetDataSequence outputSequence = inputSequence.copy(); outputSequence.clearMelody(); LeadsheetDataSequence decoderInputSequence = outputSequence.copy(); //encode and decode encodeFromSequence(autoencoder, inputSequence); decodeToSequence(autoencoder, outputSequence, decoderInputSequence); //generate song title String songTitle = nameGenerator.generateName(); //write output to specified directory with same file name + _aeOutput suffix writeLeadsheetFile(outputSequence, outputFolderPath, new File(inputLeadsheetPath).getName(), "_aeOutput", songTitle); } break; case "encode+write_queue": { //load parameter values from config file String autoencoderConnectomePath = config.getString("autoencoder_connectome"); String inputLeadsheetPath = config.getString("input_leadsheet"); String queueFolderPath = config.getString("queue_folder"); //initialize network ProductCompressingAutoencoder autoencoder = initializeAutoencoder(autoencoderConnectomePath, false); //initialize input sequence 
LeadsheetDataSequence inputSequence = leadsheetToSequence(inputLeadsheetPath); //encode encodeFromSequence(autoencoder, inputSequence); //write to a queue file in the specified queue folder (the write method will handle removing/adding extensions writeQueueFile(autoencoder, queueFolderPath, new File(inputLeadsheetPath).getName()); } break; case "encode+write_queue+decode": { //load parameter values from config file String autoencoderConnectomePath = config.getString("autoencoder_connectome"); String nameGeneratorConnectomePath = config.getString("name_generator_connectome"); String inputLeadsheetPath = config.getString("input_leadsheet"); String queueFolderPath = config.getString("queue_folder"); String outputFolderPath = config.getString("output_folder"); //initialize networks NameGenerator nameGenerator = initializeNameGenerator(nameGeneratorConnectomePath); ProductCompressingAutoencoder autoencoder = initializeAutoencoder(autoencoderConnectomePath, false); //initialize input sequences and output sequence LeadsheetDataSequence inputSequence = leadsheetToSequence(inputLeadsheetPath); LeadsheetDataSequence outputSequence = inputSequence.copy(); outputSequence.clearMelody(); LeadsheetDataSequence decoderInputSequence = outputSequence.copy(); //encode encodeFromSequence(autoencoder, inputSequence); //write to a queue file in the specified queue folder (the write method will handle removing/adding extensions writeQueueFile(autoencoder, queueFolderPath, new File(inputLeadsheetPath).getName()); //decode decodeToSequence(autoencoder, outputSequence, decoderInputSequence); //generate song title String songTitle = nameGenerator.generateName(); //write output to specified directory with same file name + _aeOutput suffix writeLeadsheetFile(outputSequence, outputFolderPath, new File(inputLeadsheetPath).getName(), "_aeOutput", songTitle); } break; case "create_feature_property_vector": { //load parameter values from config file String inputCorpusFolder = config.getString("input_corpus_folder"); int featureSize = config.getInt("feature_size"); String featurePropertiesPath = config.getString("feature_properties_path"); String featureProperty = config.getString("feature_property"); //compile array of valid leadsheet files File[] songFiles = new File(inputCorpusFolder) .listFiles((File dir, String name) -> name.endsWith(".ls")); //construct feature property vector from analyzed feature property values of all songs AVector featurePropertyValues = Vector.createLength(0); int featureIndex = 0; for (File inputFile : songFiles) { LeadsheetDataSequence melodySequence = leadsheetToSequence(inputFile.getPath()); featurePropertyValues.join(melodyFeatureAnalysis(melodySequence, featureProperty, featureSize)); } //write generated feature_properties BufferedWriter writer = new BufferedWriter( new FileWriter(featurePropertiesPath + "_" + featureProperty + ".v")); writer.write(ReadWriteUtilities.getNumpyCSVString(featurePropertyValues)); writer.close(); } break; case "compile_feature_queue_matrix": { //load parameter values from config file String queueFolderPath = config.getString("queue_folder"); String featureMatrixPath = config.getString("feature_matrix_path"); //generate feature matrix from all feature queues in specified queue folder File[] queueFiles = new File(queueFolderPath).listFiles((File dir, String name) -> name.endsWith(".q")); AMatrix totalFeatureMatrix = generateFeatureQueueMatrix(queueFiles); String writeData = ReadWriteUtilities.getNumpyCSVString(totalFeatureMatrix); BufferedWriter writer = new 
BufferedWriter(new FileWriter(featureMatrixPath)); writer.write(writeData); writer.close(); } break; case "generate_from_feature_queue_matrix": { //load parameter values from config file String autoencoderConnectomePath = config.getString("autoencoder_connectome"); String referenceLeadsheetPath = config.getString("reference_leadsheet"); String featureQueueMatrixPath = config.getString("feature_queue_matrix_path"); String outputFilePath = config.getString("output_file_path"); String songTitle = config.getString("song_title", "Generation from Feature Matrix " + featureQueueMatrixPath); int featureSize = config.getInt("feature_size"); //initialize network ProductCompressingAutoencoder autoencoder = initializeAutoencoder(autoencoderConnectomePath, false); //initialize chord sequence LeadsheetDataSequence chordSequence = leadsheetToSequence(referenceLeadsheetPath); chordSequence.clearMelody(); //call generation method generateFromFeatureMatrix(autoencoder, autoencoderConnectomePath, chordSequence, featureQueueMatrixPath, featureSize, outputFilePath, songTitle); } break; case "population_trade": { //load parameter values from config file String autoencoderConnectomePath = config.getString("autoencoder_connectome"); String inputLeadsheetPath = config.getString("input_leadsheet"); String outputFolderPath = config.getString("output_folder"); int tradingPartSize = config.getInt("trading_part_size"); double interpVariance = config.getDouble("interpolation_variance"); double interpMin = config.getDouble("interpolation_min"); double herdingStrength = config.getDouble("herding_strength"); double mutationStrength = config.getDouble("mutation_strength"); double crossoverStrength = config.getDouble("crossover_strength"); //initialize network ProductCompressingAutoencoder autoencoder = initializeAutoencoder(autoencoderConnectomePath, true); //perform population trading test populationTradingTest(autoencoder, autoencoderConnectomePath, new File(inputLeadsheetPath), new File(outputFolderPath), tradingPartSize, interpVariance, interpMin, herdingStrength, mutationStrength, crossoverStrength); } break; case "interpolation": { //load parameter values from config file String autoencoderConnectomePath = config.getString("autoencoder_connectome"); String inputLeadsheetPath = config.getString("input_leadsheet"); String targetQueuePath = config.getString("target_queue"); String outputFolderPath = config.getString("output_folder"); int numInterpolationDivisions = config.getInt("num_interpolation_divisions"); //initialize network ProductCompressingAutoencoder autoencoder = initializeAutoencoder(autoencoderConnectomePath, false); //perform the interpolation test interpolateTest(autoencoder, autoencoderConnectomePath, new File(inputLeadsheetPath), new File(targetQueuePath), new File(outputFolderPath), numInterpolationDivisions); } break; case "frankenstein": { //load parameter values from config file String autoencoderConnectomePath = config.getString("autoencoder_connectome"); String primaryQueuePath = config.getString("primary_queue_path"); String referenceLeadsheetPath = config.getString("reference_leadsheet"); String queueFolderPath = config.getString("queue_folder"); String outputFilePath = config.getString("output_file_path"); int numReferenceQueues = config.getInt("num_reference_queues"); int numCombinations = config.getInt("num_combinations"); double interpolationMagnitude = config.getDouble("interpolation_strength"); //initialize network ProductCompressingAutoencoder autoencoder = 
initializeAutoencoder(autoencoderConnectomePath, false); //initialize chord sequence LeadsheetDataSequence chordSequence = leadsheetToSequence(referenceLeadsheetPath); chordSequence.clearMelody(); //perform frankenstein test frankensteinTest(autoencoder, autoencoderConnectomePath, primaryQueuePath, new File(queueFolderPath), outputFilePath, chordSequence, numReferenceQueues, numCombinations, interpolationMagnitude); } break; default: throw new RuntimeException("Unrecognized test type"); } LogTimer.log("Process finished"); //Done! }
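In the 'create_feature_property_vector' case above, getPath() bridges the File objects returned by listFiles() and the path-string API of leadsheetToSequence(). A minimal sketch of that pattern, with a hypothetical corpus folder name and a println standing in for the real analysis call:

import java.io.File;

public class CorpusWalk {
    public static void main(String[] args) {
        File[] songFiles = new File("corpus")   // hypothetical corpus folder
                .listFiles((File dir, String name) -> name.endsWith(".ls"));
        if (songFiles == null) {
            return; // "corpus" is not a directory
        }
        for (File inputFile : songFiles) {
            // getPath() keeps the "corpus/<name>.ls" form produced by listFiles(),
            // which is what a path-based reader such as leadsheetToSequence expects.
            System.out.println("Would analyze " + inputFile.getPath());
        }
    }
}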
From source file:es.tid.fiware.fiwareconnectors.cygnus.nodes.CygnusApplication.java
/** * Main application to be run when this CygnusApplication is invoked. The only differences with the original one * are the CygnusApplication is used instead of the Application one, and the Management Interface port option in * the command line./* w w w .j a va 2 s. c o m*/ * @param args */ public static void main(String[] args) { try { Options options = new Options(); Option option = new Option("n", "name", true, "the name of this agent"); option.setRequired(true); options.addOption(option); option = new Option("f", "conf-file", true, "specify a conf file"); option.setRequired(true); options.addOption(option); option = new Option(null, "no-reload-conf", false, "do not reload " + "conf file if changed"); options.addOption(option); option = new Option("h", "help", false, "display help text"); options.addOption(option); option = new Option("p", "mgmt-if-port", true, "the management interface port"); option.setRequired(false); options.addOption(option); CommandLineParser parser = new GnuParser(); CommandLine commandLine = parser.parse(options, args); File configurationFile = new File(commandLine.getOptionValue('f')); String agentName = commandLine.getOptionValue('n'); boolean reload = !commandLine.hasOption("no-reload-conf"); if (commandLine.hasOption('h')) { new HelpFormatter().printHelp("flume-ng agent", options, true); return; } // if int mgmtIfPort = 8081; // default value if (commandLine.hasOption('p')) { mgmtIfPort = new Integer(commandLine.getOptionValue('p')).intValue(); } // if // the following is to ensure that by default the agent will fail on startup if the file does not exist if (!configurationFile.exists()) { // if command line invocation, then need to fail fast if (System.getProperty(Constants.SYSPROP_CALLED_FROM_SERVICE) == null) { String path = configurationFile.getPath(); try { path = configurationFile.getCanonicalPath(); } catch (IOException ex) { logger.error("Failed to read canonical path for file: " + path, ex); } // try catch throw new ParseException("The specified configuration file does not exist: " + path); } // if } // if List<LifecycleAware> components = Lists.newArrayList(); CygnusApplication application; if (reload) { EventBus eventBus = new EventBus(agentName + "-event-bus"); PollingPropertiesFileConfigurationProvider configurationProvider = new PollingPropertiesFileConfigurationProvider( agentName, configurationFile, eventBus, 30); components.add(configurationProvider); application = new CygnusApplication(components, mgmtIfPort); eventBus.register(application); } else { PropertiesFileConfigurationProvider configurationProvider = new PropertiesFileConfigurationProvider( agentName, configurationFile); application = new CygnusApplication(mgmtIfPort); application.handleConfigurationEvent(configurationProvider.getConfiguration()); } // if else application.start(); final CygnusApplication appReference = application; Runtime.getRuntime().addShutdownHook(new Thread("agent-shutdown-hook") { @Override public void run() { appReference.stop(); } // run }); } catch (Exception e) { logger.error("A fatal error occurred while running. Exception follows.", e); } // try catch }
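CygnusApplication uses getPath() as the safe fallback when getCanonicalPath() fails: the error message then still shows the path as given on the command line. A minimal sketch of that fallback, with a hard-coded default file name as an assumption:

import java.io.File;
import java.io.IOException;

public class ConfigFileCheck {
    public static void main(String[] args) {
        File configurationFile = new File(args.length > 0 ? args[0] : "agent.conf");
        if (!configurationFile.exists()) {
            // Prefer the canonical path in the error message, but fall back to
            // getPath() (the constructor argument) if canonicalization fails.
            String path = configurationFile.getPath();
            try {
                path = configurationFile.getCanonicalPath();
            } catch (IOException ex) {
                System.err.println("Failed to read canonical path for file: " + path);
            }
            System.err.println("The specified configuration file does not exist: " + path);
        }
    }
}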
From source file:exm.stc.ui.Main.java
public static void main(String[] args) { Args stcArgs = processArgs(args);// w w w . j a va 2s . c o m try { Settings.initSTCProperties(); } catch (InvalidOptionException ex) { System.err.println("Error setting up options: " + ex.getMessage()); System.exit(1); } Logger logger = null; try { logger = setupLogging(); } catch (InvalidOptionException ex) { System.err.println("Error setting up logging: " + ex.getMessage()); System.exit(1); } boolean preprocess = preprocessEnabled(logger); File inputFile = setupInputFile(logger, preprocess, stcArgs); PrintStream icOutput = setupICOutput(); File finalOutput = selectOutputFile(stcArgs); if (skipCompile(stcArgs, finalOutput)) { System.exit(ExitCode.SUCCESS.code()); } // Use intermediate file so we don't create invalid output in case of // compilation errors File tmpOutput = setupTmpOutput(); OutputStream outStream = openForOutput(tmpOutput); try { if (preprocessOnly()) { copyToOutput(inputFile, finalOutput); } else { STCompiler stc = new STCompiler(logger); stc.compile(inputFile.getPath(), stcArgs.inputFilename, preprocess, outStream, icOutput); copyToOutput(tmpOutput, finalOutput); } cleanupFiles(true, stcArgs); } catch (STCFatal ex) { // Cleanup output file if present cleanupFiles(false, stcArgs); System.exit(ex.exitCode); } }
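Here getPath() simply hands the input file's pathname string to the compiler, which writes to a temporary file first so a failed compile never leaves an invalid final output. A simplified sketch of that temp-then-replace arrangement; the compile() helper is a hypothetical stand-in for STCompiler.compile():

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;

public class CompileViaTempOutput {
    // Hypothetical stand-in for STCompiler.compile(inputPath, ...).
    static void compile(String inputPath, File out) throws IOException {
        Files.write(out.toPath(), ("compiled from " + inputPath + "\n").getBytes());
    }

    public static void main(String[] args) throws IOException {
        File inputFile = new File(args.length > 0 ? args[0] : "program.swift");
        File finalOutput = new File("program.tic");
        // Compile into a temporary file so an aborted run never corrupts the final output;
        // the compiler itself only ever sees the pathname string from getPath().
        File tmpOutput = File.createTempFile("stc", ".tmp");
        compile(inputFile.getPath(), tmpOutput);
        Files.move(tmpOutput.toPath(), finalOutput.toPath(), StandardCopyOption.REPLACE_EXISTING);
        System.out.println("Wrote " + finalOutput.getPath());
    }
}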
From source file:es.upm.dit.xsdinferencer.XSDInferencer.java
/** * Main method, executed when the tool is invoked as a standalone application * @param args an array with all the arguments passed to the application * @throws XSDConfigurationException if there is a problem regarding the configuration * @throws IOException if there is an I/O problem while reading the input XML files or writing the output files * @throws JDOMException if there is any problem while parsing the input XML files */// w w w . j av a2 s . com public static void main(String[] args) throws Exception { if (Arrays.asList(args).contains("--help")) { printHelp(); System.exit(0); } try { XSDInferencer inferencer = new XSDInferencer(); Results results = inferencer.inferSchema(args); Map<String, String> xsdsAsXMLStrings = results.getXSDsAsStrings(); Map<String, String> jsonsAsStrings = results.getJsonSchemasAsStrings(); Map<String, String> schemasAsStrings = xsdsAsXMLStrings != null ? xsdsAsXMLStrings : jsonsAsStrings; Map<String, String> statisticsDocumentsAsXMLStrings = results.getStatisticsAsStrings(); File outputDirectory = null; for (int i = 0; i < args.length; i++) { if (!args[i].equalsIgnoreCase("--" + KEY_OUTPUT_DIRECTORY)) continue; if (args[i + 1].startsWith("--") || i == args.length - 1) throw new IllegalArgumentException("Output directory parameter bad specified"); outputDirectory = new File(args[i + 1]); if (!outputDirectory.exists()) throw new FileNotFoundException("Output directory not found."); if (!outputDirectory.isDirectory()) throw new NotDirectoryException(outputDirectory.getPath()); } if (outputDirectory != null) { System.out.println("Writing results to " + outputDirectory.getAbsolutePath()); for (String name : schemasAsStrings.keySet()) { File currentOutpuFile = new File(outputDirectory, name); FileOutputStream fOs = new FileOutputStream(currentOutpuFile); BufferedWriter bWriter = new BufferedWriter(new OutputStreamWriter(fOs, Charsets.UTF_8)); bWriter.write(schemasAsStrings.get(name)); bWriter.flush(); bWriter.close(); } if (statisticsDocumentsAsXMLStrings != null) { for (String name : statisticsDocumentsAsXMLStrings.keySet()) { File currentOutpuFile = new File(outputDirectory, name); FileWriter fWriter = new FileWriter(currentOutpuFile); BufferedWriter bWriter = new BufferedWriter(fWriter); bWriter.write(statisticsDocumentsAsXMLStrings.get(name)); bWriter.flush(); bWriter.close(); } } System.out.println("Results written"); } else { for (String name : schemasAsStrings.keySet()) { System.out.println(name + ":"); System.out.println(schemasAsStrings.get(name)); System.out.println(); } if (statisticsDocumentsAsXMLStrings != null) { for (String name : statisticsDocumentsAsXMLStrings.keySet()) { System.out.println(name + ":"); System.out.println(statisticsDocumentsAsXMLStrings.get(name)); System.out.println(); } } } } catch (XSDInferencerException e) { System.err.println(); System.err.println("Error at inference proccess: " + e.getMessage()); e.printStackTrace(); System.exit(1); } }
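XSDInferencer passes getPath() straight into NotDirectoryException, whose constructor takes the offending path string as its message. A minimal sketch of that validation step, with a default directory name as an assumption:

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.NotDirectoryException;

public class OutputDirectoryCheck {
    public static void main(String[] args) throws IOException {
        File outputDirectory = new File(args.length > 0 ? args[0] : "out");
        if (!outputDirectory.exists()) {
            throw new FileNotFoundException("Output directory not found.");
        }
        if (!outputDirectory.isDirectory()) {
            // NotDirectoryException carries the offending path as its message,
            // and getPath() reports it exactly as the user supplied it.
            throw new NotDirectoryException(outputDirectory.getPath());
        }
        System.out.println("Writing results to " + outputDirectory.getAbsolutePath());
    }
}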
From source file:grnet.filter.XMLFiltering.java
public static void main(String[] args) throws IOException { // TODO Auto-generated method ssstub Enviroment enviroment = new Enviroment(args[0]); if (enviroment.envCreation) { Core core = new Core(); XMLSource source = new XMLSource(args[0]); File sourceFile = source.getSource(); if (sourceFile.exists()) { Collection<File> xmls = source.getXMLs(); System.out.println("Filtering repository:" + enviroment.dataProviderFilteredIn.getName()); System.out.println("Number of files to filter:" + xmls.size()); Iterator<File> iterator = xmls.iterator(); FilteringReport report = null; if (enviroment.getArguments().getProps().getProperty(Constants.createReport) .equalsIgnoreCase("true")) { report = new FilteringReport(enviroment.getArguments().getDestFolderLocation(), enviroment.getDataProviderFilteredIn().getName()); }//from w w w. j a v a2 s. co m ConnectionFactory factory = new ConnectionFactory(); factory.setHost(enviroment.getArguments().getQueueHost()); factory.setUsername(enviroment.getArguments().getQueueUserName()); factory.setPassword(enviroment.getArguments().getQueuePassword()); while (iterator.hasNext()) { StringBuffer logString = new StringBuffer(); logString.append(enviroment.dataProviderFilteredIn.getName()); File xmlFile = iterator.next(); String name = xmlFile.getName(); name = name.substring(0, name.indexOf(".xml")); logString.append(" " + name); boolean xmlIsFilteredIn = core.filterXML(xmlFile, enviroment.getArguments().getQueries()); if (xmlIsFilteredIn) { logString.append(" " + "FilteredIn"); slf4jLogger.info(logString.toString()); Connection connection = factory.newConnection(); Channel channel = connection.createChannel(); channel.queueDeclare(QUEUE_NAME, false, false, false, null); channel.basicPublish("", QUEUE_NAME, null, logString.toString().getBytes()); channel.close(); connection.close(); try { if (report != null) { report.appendXMLFileNameNStatus(xmlFile.getPath(), Constants.filteredInData); report.raiseFilteredInFilesNum(); } FileUtils.copyFileToDirectory(xmlFile, enviroment.getDataProviderFilteredIn()); } catch (IOException e) { // TODO Auto-generated catch block // e.printStackTrace(); e.printStackTrace(); System.out.println("Filtering failed."); } } else { logString.append(" " + "FilteredOut"); slf4jLogger.info(logString.toString()); Connection connection = factory.newConnection(); Channel channel = connection.createChannel(); channel.queueDeclare(QUEUE_NAME, false, false, false, null); channel.basicPublish("", QUEUE_NAME, null, logString.toString().getBytes()); channel.close(); connection.close(); try { if (report != null) { report.appendXMLFileNameNStatus(xmlFile.getPath(), Constants.filteredOutData); report.raiseFilteredOutFilesNum(); } FileUtils.copyFileToDirectory(xmlFile, enviroment.getDataProviderFilteredOuT()); } catch (IOException e) { // TODO Auto-generated catch block // e.printStackTrace(); e.printStackTrace(); System.out.println("Filtering failed."); } } } if (report != null) { report.appendXPathExpression(enviroment.getArguments().getQueries()); report.appendGeneralInfo(); } System.out.println("Filtering is done."); } } }
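In the filtering loop above, getPath() is what gets recorded in the report and the queue log: the same relative path the file was listed under. A stripped-down sketch of that reporting step, with the filter decision hard-coded and a hypothetical input record:

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;

public class FilterAndReport {
    public static void main(String[] args) throws IOException {
        File xmlFile = new File("repo/record001.xml");   // hypothetical input record
        File filteredInDir = new File("filtered-in");
        boolean filteredIn = true;                        // stands in for core.filterXML(...)
        // Both the report entry and the queue message carry xmlFile.getPath(),
        // i.e. the relative path the file was opened with.
        System.out.println(xmlFile.getPath() + " " + (filteredIn ? "FilteredIn" : "FilteredOut"));
        if (filteredIn && xmlFile.exists()) {
            Files.createDirectories(filteredInDir.toPath());
            Files.copy(xmlFile.toPath(), new File(filteredInDir, xmlFile.getName()).toPath(),
                    StandardCopyOption.REPLACE_EXISTING);
        }
    }
}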
From source file:imp.lstm.main.Driver.java
public static void main(String[] args) throws FileNotFoundException, IOException, ConfigurationException, InvalidParametersException { FileBasedConfigurationBuilder<PropertiesConfiguration> builder = new FileBasedConfigurationBuilder<>( PropertiesConfiguration.class).configure( new Parameters().properties().setFileName(args[0]).setThrowExceptionOnMissing(true) .setListDelimiterHandler(new DefaultListDelimiterHandler(';')) .setIncludesAllowed(false)); Configuration config = builder.getConfiguration(); String inputSongPath = config.getString("input_song"); String outputFolderPath = config.getString("output_folder"); String autoEncoderParamsPath = config.getString("auto_encoder_params"); String nameGeneratorParamsPath = config.getString("name_generator_params"); String queueFolderPath = config.getString("queue_folder"); String referenceQueuePath = config.getString("reference_queue", "nil"); String inputCorpusFolder = config.getString("input_corpus_folder"); boolean shouldWriteQueue = config.getBoolean("should_write_generated_queue"); boolean frankensteinTest = config.getBoolean("queue_tests_frankenstein"); boolean interpolateTest = config.getBoolean("queue_tests_interpolation"); boolean iterateOverCorpus = config.getBoolean("iterate_over_corpus", false); boolean shouldGenerateSongTitle = config.getBoolean("generate_song_title"); boolean shouldGenerateSong = config.getBoolean("generate_leadsheet"); LogTimer.initStartTime(); //start our logging timer to keep track of our execution time LogTimer.log("Creating name generator..."); //here is just silly code for generating name based on an LSTM lol $wag LSTM lstm = new LSTM(); FullyConnectedLayer fullLayer = new FullyConnectedLayer(Operations.None); Loadable titleNetLoader = new Loadable() { @Override// www. j a va 2s. 
co m public boolean load(INDArray array, String path) { String car = pathCar(path); String cdr = pathCdr(path); switch (car) { case "full": return fullLayer.load(array, cdr); case "lstm": return lstm.load(array, cdr); default: return false; } } }; LogTimer.log("Packing name generator from files..."); (new NetworkConnectomeLoader()).load(nameGeneratorParamsPath, titleNetLoader); String characterString = " !\"'[],-.01245679:?ABCDEFGHIJKLMNOPQRSTUVWYZabcdefghijklmnopqrstuvwxyz"; //Initialization LogTimer.log("Creating autoencoder..."); int inputSize = 34; int outputSize = EncodingParameters.noteEncoder.getNoteLength(); int featureVectorSize = 100; ProductCompressingAutoencoder autoencoder = new ProductCompressingAutoencoder(24, 48, 84 + 1, false); //create our network int numInterpolationDivisions = 5; //"pack" the network from weights and biases file directory LogTimer.log("Packing autoencoder from files"); (new NetworkConnectomeLoader()).load(autoEncoderParamsPath, autoencoder); File[] songFiles; if (iterateOverCorpus) { songFiles = new File(inputCorpusFolder).listFiles(); } else { songFiles = new File[] { new File(inputSongPath) }; } for (File inputFile : songFiles) { (new NetworkConnectomeLoader()).refresh(autoEncoderParamsPath, autoencoder, "initialstate"); String songTitle; if (shouldGenerateSong) { Random rand = new Random(); AVector charOut = Vector.createLength(characterString.length()); GroupedSoftMaxSampler sampler = new GroupedSoftMaxSampler( new Group[] { new Group(0, characterString.length(), true) }); songTitle = ""; for (int i = 0; i < 50; i++) { charOut = fullLayer.forward(lstm.step(charOut)); charOut = sampler.filter(charOut); int charIndex = 0; for (; charIndex < charOut.length(); charIndex++) { if (charOut.get(charIndex) == 1.0) { break; } } songTitle += characterString.substring(charIndex, charIndex + 1); } songTitle = songTitle.trim(); LogTimer.log("Generated song name: " + songTitle); } else { songTitle = "The Song We Never Name"; } LogTimer.log("Reading file..."); LeadSheetDataSequence inputSequence = LeadSheetIO.readLeadSheet(inputFile); //read our leadsheet to get a data vessel as retrieved in rbm-provisor LeadSheetDataSequence outputSequence = inputSequence.copy(); outputSequence.clearMelody(); if (interpolateTest) { LeadSheetDataSequence additionalOutput = outputSequence.copy(); for (int i = 0; i < numInterpolationDivisions; i++) { outputSequence.concat(additionalOutput.copy()); } } LeadSheetDataSequence decoderInputSequence = outputSequence.copy(); LogTimer.startLog("Encoding data..."); //TradingTimer.initStart(); //start our trading timer to keep track our our generation versus realtime play while (inputSequence.hasNext()) { //iterate through time steps in input data //TradingTimer.waitForNextTimedInput(); autoencoder.encodeStep(inputSequence.retrieve()); //feed the resultant input vector into the network if (advanceDecoding) { //if we are using advance decoding (we start decoding as soon as we can) if (autoencoder.canDecode()) { //if queue has enough data to decode from outputSequence.pushStep(null, null, autoencoder.decodeStep(decoderInputSequence.retrieve())); //take sampled data for a timestep from autoencoder //TradingTimer.logTimestep(); //log our time to TradingTimer so we can know how far ahead of realtime we are } } } LogTimer.endLog(); if (shouldWriteQueue) { String queueFilePath = queueFolderPath + java.io.File.separator + inputFile.getName().replace(".ls", ".q"); FragmentedNeuralQueue currQueue = autoencoder.getQueue(); 
currQueue.writeToFile(queueFilePath); LogTimer.log("Wrote queue " + inputFile.getName().replace(".ls", ".q") + " to file..."); } if (shouldGenerateSong) { if (interpolateTest) { FragmentedNeuralQueue refQueue = new FragmentedNeuralQueue(); refQueue.initFromFile(referenceQueuePath); FragmentedNeuralQueue currQueue = autoencoder.getQueue(); //currQueue.writeToFile(queueFilePath); autoencoder.setQueue(currQueue.copy()); while (autoencoder.hasDataStepsLeft()) { //we are done encoding all time steps, so just finish decoding!{ outputSequence.pushStep(null, null, autoencoder.decodeStep(decoderInputSequence.retrieve())); //take sampled data for a timestep from autoencoder //TradingTimer.logTimestep(); //log our time to TradingTimer so we can know how far ahead of realtime we are } for (int i = 1; i <= numInterpolationDivisions; i++) { System.out.println("Starting interpolation " + ((1.0 / numInterpolationDivisions) * (i))); (new NetworkConnectomeLoader()).refresh(autoEncoderParamsPath, autoencoder, "initialstate"); FragmentedNeuralQueue currCopy = currQueue.copy(); currCopy.basicInterpolate(refQueue, (1.0 / numInterpolationDivisions) * (i)); autoencoder.setQueue(currCopy); int timeStep = 0; while (autoencoder.hasDataStepsLeft()) { //we are done encoding all time steps, so just finish decoding!{ System.out.println("interpolation " + i + " step " + ++timeStep); outputSequence.pushStep(null, null, autoencoder.decodeStep(decoderInputSequence.retrieve())); //take sampled data for a timestep from autoencoder //TradingTimer.logTimestep(); //log our time to TradingTimer so we can know how far ahead of realtime we are } } } if (frankensteinTest) { LogTimer.startLog("Loading queues"); File queueFolder = new File(queueFolderPath); int numComponents = config.getInt("frankenstein_num_components", 5); int numCombinations = config.getInt("frankenstein_num_combinations", 6); double interpolationMagnitude = config.getDouble("frankenstein_magnitude", 2.0); if (queueFolder.isDirectory()) { File[] queueFiles = queueFolder.listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.contains(".q"); } }); List<File> fileList = new ArrayList<>(); for (File file : queueFiles) { fileList.add(file); } Collections.shuffle(fileList); int numSelectedFiles = (numComponents > queueFiles.length) ? 
queueFiles.length : numComponents; for (int i = 0; i < queueFiles.length - numSelectedFiles; i++) { fileList.remove(fileList.size() - 1); } List<FragmentedNeuralQueue> queuePopulation = new ArrayList<>(fileList.size()); songTitle += " - a mix of "; for (File file : fileList) { FragmentedNeuralQueue newQueue = new FragmentedNeuralQueue(); newQueue.initFromFile(file.getPath()); queuePopulation.add(newQueue); songTitle += file.getName().replaceAll(".ls", "") + ", "; } LogTimer.endLog(); LeadSheetDataSequence additionalOutput = outputSequence.copy(); for (int i = 1; i < numCombinations; i++) { outputSequence.concat(additionalOutput.copy()); } decoderInputSequence = outputSequence.copy(); FragmentedNeuralQueue origQueue = autoencoder.getQueue(); for (int i = 0; i < numCombinations; i++) { LogTimer.startLog("Performing queue interpolation..."); AVector combinationStrengths = Vector.createLength(queuePopulation.size()); Random vectorRand = new Random(i); for (int j = 0; j < combinationStrengths.length(); j++) { combinationStrengths.set(j, vectorRand.nextDouble()); } combinationStrengths.divide(combinationStrengths.elementSum()); FragmentedNeuralQueue currQueue = origQueue.copy(); for (int k = 0; k < combinationStrengths.length(); k++) { currQueue.basicInterpolate(queuePopulation.get(k), combinationStrengths.get(k) * interpolationMagnitude); } LogTimer.endLog(); autoencoder.setQueue(currQueue); LogTimer.startLog("Refreshing autoencoder state..."); (new NetworkConnectomeLoader()).refresh(autoEncoderParamsPath, autoencoder, "initialstate"); LogTimer.endLog(); LogTimer.startLog("Decoding segment..."); while (autoencoder.hasDataStepsLeft()) { //we are done encoding all time steps, so just finish decoding!{ outputSequence.pushStep(null, null, autoencoder.decodeStep(decoderInputSequence.retrieve())); //take sampled data for a timestep from autoencoder //TradingTimer.logTimestep(); //log our time to TradingTimer so we can know how far ahead of realtime we are } LogTimer.endLog(); } } } while (autoencoder.hasDataStepsLeft()) { //we are done encoding all time steps, so just finish decoding!{ outputSequence.pushStep(null, null, autoencoder.decodeStep(decoderInputSequence.retrieve())); //take sampled data for a timestep from autoencoder //TradingTimer.logTimestep(); //log our time to TradingTimer so we can know how far ahead of realtime we are } LogTimer.log("Writing file..."); String outputFilename = outputFolderPath + java.io.File.separator + inputFile.getName().replace(".ls", "_Output"); //we'll write our generated file with the same name plus "_Output" LeadSheetIO.writeLeadSheet(outputSequence, outputFilename, songTitle); System.out.println(outputFilename); } else { autoencoder.setQueue(new FragmentedNeuralQueue()); } } LogTimer.log("Process finished"); //Done! }
From source file:bear.core.BearMain.java
/** * -VbearMain.appConfigDir=src/main/groovy/examples -VbearMain.buildDir=.bear/classes -VbearMain.script=dumpSampleGrid -VbearMain.projectClass=SecureSocialDemoProject -VbearMain.propertiesFile=.bear/test.properties *///from w w w. jav a 2 s . c o m public static void main(String[] args) throws Exception { int i = ArrayUtils.indexOf(args, "--log-level"); if (i != -1) { LoggingBooter.changeLogLevel(LogManager.ROOT_LOGGER_NAME, Level.toLevel(args[i + 1])); } i = ArrayUtils.indexOf(args, "-q"); if (i != -1) { LoggingBooter.changeLogLevel(LogManager.ROOT_LOGGER_NAME, Level.WARN); } GlobalContext global = GlobalContext.getInstance(); BearMain bearMain = null; try { bearMain = new BearMain(global, getCompilerManager(), args); } catch (Exception e) { if (e.getClass().getSimpleName().equals("MissingRequiredOptionException")) { System.out.println(e.getMessage()); } else { Throwables.getRootCause(e).printStackTrace(); } System.exit(-1); } if (bearMain.checkHelpAndVersion()) { return; } AppOptions2 options2 = bearMain.options; if (options2.has(AppOptions2.UNPACK_DEMOS)) { String filesAsText = ProjectGenerator.readResource("/demoFiles.txt"); int count = 0; for (String resource : filesAsText.split("::")) { File dest = new File(BEAR_DIR + resource); System.out.printf("copying %s to %s...%n", resource, dest); writeStringToFile(dest, ProjectGenerator.readResource(resource)); count++; } System.out.printf("extracted %d files%n", count); return; } if (options2.has(AppOptions2.CREATE_NEW)) { String dashedTitle = options2.get(AppOptions2.CREATE_NEW); String user = options2.get(AppOptions2.USER); String pass = options2.get(AppOptions2.PASSWORD); List<String> hosts = options2.getList(AppOptions2.HOSTS); List<String> template; if (options2.has(AppOptions2.TEMPLATE)) { template = options2.getList(AppOptions2.TEMPLATE); } else { template = emptyList(); } ProjectGenerator g = new ProjectGenerator(dashedTitle, user, pass, hosts, template); if (options2.has(AppOptions2.ORACLE_USER)) { g.oracleUser = options2.get(AppOptions2.ORACLE_USER); } if (options2.has(AppOptions2.ORACLE_PASSWORD)) { g.oraclePassword = options2.get(AppOptions2.ORACLE_PASSWORD); } File projectFile = new File(BEAR_DIR, g.getProjectTitle() + ".groovy"); File pomFile = new File(BEAR_DIR, "pom.xml"); writeStringToFile(projectFile, g.processTemplate("TemplateProject.template")); writeStringToFile(new File(BEAR_DIR, dashedTitle + ".properties"), g.processTemplate("project-properties.template")); writeStringToFile(new File(BEAR_DIR, "demos.properties"), g.processTemplate("project-properties.template")); writeStringToFile(new File(BEAR_DIR, "bear-fx.properties"), g.processTemplate("bear-fx.properties.template")); writeStringToFile(pomFile, g.generatePom(dashedTitle)); System.out.printf("Created project file: %s%n", projectFile.getPath()); System.out.printf("Created maven pom: %s%n", pomFile.getPath()); System.out.println("\nProject files have been created. 
You may now: " + "\n a) Run `bear " + g.getShortName() + ".ls` to quick-test your minimal setup" + "\n b) Import the project to IDE or run smoke tests, find more details at the project wiki: https://github.com/chaschev/bear/wiki/."); return; } Bear bear = global.bear; if (options2.has(AppOptions2.QUIET)) { global.put(bear.quiet, true); LoggingBooter.changeLogLevel(LogManager.ROOT_LOGGER_NAME, Level.WARN); } if (options2.has(AppOptions2.USE_UI)) { global.put(bear.useUI, true); } if (options2.has(AppOptions2.NO_UI)) { global.put(bear.useUI, false); } List<?> list = options2.getOptionSet().nonOptionArguments(); if (list.size() > 1) { throw new IllegalArgumentException("too many arguments: " + list + ", " + "please specify an invoke line, project.method(arg1, arg2)"); } if (list.isEmpty()) { throw new UnsupportedOperationException("todo implement running a single project"); } String invokeLine = (String) list.get(0); String projectName; String method; if (invokeLine.contains(".")) { projectName = StringUtils.substringBefore(invokeLine, "."); method = StringUtils.substringAfter(invokeLine, "."); } else { projectName = invokeLine; method = null; } if (method == null || method.isEmpty()) method = "deploy()"; if (!method.contains("(")) method += "()"; Optional<CompiledEntry<? extends BearProject>> optional = bearMain.compileManager.findProject(projectName); if (!optional.isPresent()) { throw new IllegalArgumentException("project was not found: " + projectName + ", loaded classes: \n" + Joiner.on("\n").join(bearMain.compileManager.findProjects()) + ", searched in: " + bearMain.compileManager.getSourceDirs() + ", "); } BearProject project = OpenBean.newInstance(optional.get().aClass).injectMain(bearMain); GroovyShell shell = new GroovyShell(); shell.setVariable("project", project); shell.evaluate("project." + method); }
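BearMain reports the generated files back to the user with getPath(), so the console shows the same relative ".bear/..." locations the files were created under. A small JDK-only sketch of that scaffolding-and-reporting step, with stub contents standing in for the real templates:

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;

public class ProjectScaffold {
    public static void main(String[] args) throws IOException {
        File bearDir = new File(".bear");                          // hypothetical project directory
        File projectFile = new File(bearDir, "demo-project.groovy");
        File pomFile = new File(bearDir, "pom.xml");
        Files.createDirectories(bearDir.toPath());
        Files.write(projectFile.toPath(), "// generated project stub\n".getBytes(StandardCharsets.UTF_8));
        Files.write(pomFile.toPath(), "<project/>\n".getBytes(StandardCharsets.UTF_8));
        // getPath() preserves the relative ".bear/..." form the File was built with,
        // which reads better in console output than an absolute path.
        System.out.printf("Created project file: %s%n", projectFile.getPath());
        System.out.printf("Created maven pom: %s%n", pomFile.getPath());
    }
}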