Usage examples for `java.util.Arrays.fill`.
Signature: `public static void fill(Object[] a, Object val)` — assigns the given reference value to every element of the array.
From source file:hyperheuristics.main.comparisons.CompareHypervolumes.java
private static void generateTables(String[] problems, String[] heuristicFunctions, int numberOfObjectives, String[] algorithms) throws InterruptedException, IOException { String outputDirectory = outpath; try (FileWriter fileWriter = new FileWriter(outputDirectory + "TABLES_" + numberOfObjectives + ".txt")) { StringBuilder tableString = new StringBuilder(); DecimalFormat decimalFormatter = new DecimalFormat("0.00E0"); StandardDeviation standardDeviation = new StandardDeviation(); pfKnown: {//from ww w . j a va2s.c o m tableString.append("\\documentclass{paper}\n" + "\n" + "\\usepackage[T1]{fontenc}\n" + "\\usepackage[latin1]{inputenc}\n" + "\\usepackage[hidelinks]{hyperref}\n" + "\\usepackage{tabulary}\n" + "\\usepackage{booktabs}\n" + "\\usepackage{multirow}\n" + "\\usepackage{amsmath}\n" + "\\usepackage{mathtools}\n" + "\\usepackage{graphicx}\n" + "\\usepackage{array}\n" + "\\usepackage[linesnumbered,ruled,inoutnumbered]{algorithm2e}\n" + "\\usepackage{subfigure}\n" + "\\usepackage[hypcap]{caption}\n" + "\\usepackage{pdflscape}\n" + "\n" + "\\begin{document}\n" + "\n" + "\\begin{landscape}\n" + "\n"); tableString .append("\\begin{table}[!htb]\n" + "\t\\centering\n" + "\t\\def\\arraystretch{1.5}\n" + "\t\\setlength{\\tabcolsep}{10pt}\n" + "\t\\fontsize{8pt}{10pt}\\selectfont" + "\t\\caption{Hypervolume of the $PF_{known}$ fronts for ") .append(numberOfObjectives).append(" objectives}\n" + "\t\\label{tab:Hypervolumes ") .append(numberOfObjectives).append(" objectives}\n" + "\t\\begin{tabulary}{\\textwidth}{c"); for (String algorithm : algorithms) { tableString.append("c"); for (String heuristicFunction : heuristicFunctions) { tableString.append("c"); } } tableString.append("}\n"); tableString.append("\t\t\\toprule\n"); tableString.append("\t\t\\textbf{System}"); for (String algorithm : algorithms) { tableString.append(" & \\textbf{").append(algorithm).append("}"); for (String heuristicFunction : heuristicFunctions) { tableString.append(" & 
\\textbf{").append(algorithm).append("-").append(heuristicFunction) .append("}"); } } tableString.append("\\\\\n"); tableString.append("\t\t\\midrule\n"); for (String problem : problems) { HypervolumeHandler hypervolumeHandler = new HypervolumeHandler(); for (String algorithm : algorithms) { String mecbaDirectory = "resultado/" + algorithm.toLowerCase().replaceAll("-", "") + "/" + problem + "_Comb_" + numberOfObjectives + "obj/"; //Best PFknown hypervolume //Populate HypervolueHandler hypervolumeHandler.addParetoFront(mecbaDirectory + "All_FUN_" + algorithm.toLowerCase().replaceAll("-", "") + "-" + problem); for (String heuristicFunction : heuristicFunctions) { String path = outpath; path += algorithm + "/" + numberOfObjectives + "objectives/"; String hyperheuristicDirectory = path + heuristicFunction + "/" + problem + "/"; hypervolumeHandler.addParetoFront(hyperheuristicDirectory + "FUN.txt"); } } double[] mecbaHypervolumes = new double[algorithms.length]; double[] hyperheuristicHypervolumes = new double[heuristicFunctions.length * algorithms.length]; Arrays.fill(hyperheuristicHypervolumes, 0D); for (int i = 0; i < algorithms.length; i++) { String algorithm = algorithms[i]; String mecbaDirectory = "resultado/" + algorithm.toLowerCase().replaceAll("-", "") + "/" + problem + "_Comb_" + numberOfObjectives + "obj/"; //Calculate Hypervolume mecbaHypervolumes[i] = hypervolumeHandler.calculateHypervolume(mecbaDirectory + "All_FUN_" + algorithm.toLowerCase().replaceAll("-", "") + "-" + problem, numberOfObjectives); for (int j = 0; j < heuristicFunctions.length; j++) { String heuristicFunction = heuristicFunctions[j]; String path = outpath; path += algorithm + "/" + numberOfObjectives + "objectives/"; String hyperheuristicDirectory = path + heuristicFunction + "/" + problem + "/"; hyperheuristicHypervolumes[i * heuristicFunctions.length + j] = hypervolumeHandler .calculateHypervolume(hyperheuristicDirectory + "FUN.txt", numberOfObjectives); } } //Write PFknown results 
double maxHypervolume = Double.NEGATIVE_INFINITY; for (int i = 0; i < mecbaHypervolumes.length; i++) { double hypervolume = mecbaHypervolumes[i]; if (hypervolume > maxHypervolume) { maxHypervolume = hypervolume; } } for (int i = 0; i < heuristicFunctions.length; i++) { if (hyperheuristicHypervolumes[i] > maxHypervolume) { maxHypervolume = hyperheuristicHypervolumes[i]; } } tableString.append("\t\t" + problem.replaceAll("\\_", "\\\\_")); for (int i = 0; i < algorithms.length; i++) { tableString.append(" & "); double mecbaHypervolume = mecbaHypervolumes[i]; if (maxHypervolume == mecbaHypervolume) { tableString.append("\\textbf{"); } tableString.append(decimalFormatter.format(mecbaHypervolume)); if (maxHypervolume == mecbaHypervolume) { tableString.append("}"); } for (int j = 0; j < heuristicFunctions.length; j++) { tableString.append(" & "); double hyperheuristicHypervolume = hyperheuristicHypervolumes[i * heuristicFunctions.length + j]; if (maxHypervolume == hyperheuristicHypervolume) { tableString.append("\\textbf{"); } tableString.append(decimalFormatter.format(hyperheuristicHypervolume)); if (maxHypervolume == hyperheuristicHypervolume) { tableString.append("}"); } } } tableString.append("\\\\\n"); } tableString.append("\t\t\\bottomrule\n"); tableString.append("\t\\end{tabulary}\n"); tableString.append("\\end{table}\n\n"); } //Best mean hypervolume mean: { tableString.append("\\begin{table}[!htb]\n" + "\\centering\n" + "\t\\def\\arraystretch{1.5}\n" + "\t\\setlength{\\tabcolsep}{10pt}\n" + "\t\\fontsize{8pt}{10pt}\\selectfont" + "\t\\caption{Hypervolume average found for " + numberOfObjectives + " objectives}\n" + "\t\\label{tab:Hypervolumes average " + numberOfObjectives + " objectives}\n" + "\t\\begin{tabulary}{\\textwidth}{c"); for (String algorithm : algorithms) { tableString.append("c"); for (String heuristicFunction : heuristicFunctions) { tableString.append("c"); } } tableString.append("}\n"); tableString.append("\t\t\\toprule\n"); 
tableString.append("\t\t\\textbf{System}"); for (String algorithm : algorithms) { tableString.append(" & \\textbf{" + algorithm + "}"); for (String heuristicFunction : heuristicFunctions) { tableString.append(" & \\textbf{" + algorithm + "-" + heuristicFunction + "}"); } } tableString.append("\\\\\n"); tableString.append("\t\t\\midrule\n"); for (String problem : problems) { HypervolumeHandler hypervolumeHandler = new HypervolumeHandler(); for (String algorithm : algorithms) { String mecbaDirectory = "resultado/" + algorithm.toLowerCase().replaceAll("-", "") + "/" + problem + "_Comb_" + numberOfObjectives + "obj/"; for (int i = 0; i < EXECUTIONS; i++) { hypervolumeHandler.addParetoFront( mecbaDirectory + "FUN_" + algorithm.toLowerCase().replaceAll("-", "") + "-" + problem + "-" + i + ".NaoDominadas"); } for (String heuristicFunction : heuristicFunctions) { String path = outpath; path += algorithm + "/" + numberOfObjectives + "objectives/"; String hyperheuristicDirectory = path + heuristicFunction + "/" + problem + "/"; for (int j = 0; j < EXECUTIONS; j++) { hypervolumeHandler .addParetoFront(hyperheuristicDirectory + "EXECUTION_" + j + "/FUN.txt"); } } } double[][] mecbaHypervolumes = new double[algorithms.length][EXECUTIONS]; for (double[] mecbaHypervolume : mecbaHypervolumes) { Arrays.fill(mecbaHypervolume, 0D); } double mecbaMeanHypervolume[] = new double[algorithms.length]; Arrays.fill(mecbaMeanHypervolume, 0D); double[][] hyperheuristicHypervolumes = new double[algorithms.length * heuristicFunctions.length][EXECUTIONS]; for (double[] hyperheuristicHypervolume : hyperheuristicHypervolumes) { Arrays.fill(hyperheuristicHypervolume, 0D); } double[] hyperheuristicMeanHypervolumes = new double[algorithms.length * heuristicFunctions.length]; Arrays.fill(hyperheuristicMeanHypervolumes, 0D); for (int i = 0; i < algorithms.length; i++) { String algorithm = algorithms[i]; String mecbaDirectory = "resultado/" + algorithm.toLowerCase().replaceAll("-", "") + "/" + problem + 
"_Comb_" + numberOfObjectives + "obj/"; for (int j = 0; j < EXECUTIONS; j++) { mecbaHypervolumes[i][j] = hypervolumeHandler .calculateHypervolume( mecbaDirectory + "FUN_" + algorithm.toLowerCase().replaceAll("-", "") + "-" + problem + "-" + j + ".NaoDominadas", numberOfObjectives); mecbaMeanHypervolume[i] += mecbaHypervolumes[i][j]; for (int k = 0; k < heuristicFunctions.length; k++) { String path = outpath; path += algorithm + "/" + numberOfObjectives + "objectives/"; String hyperheuristicDirectory = path + heuristicFunctions[k] + "/" + problem + "/"; hyperheuristicHypervolumes[i * heuristicFunctions.length + k][j] = hypervolumeHandler.calculateHypervolume( hyperheuristicDirectory + "EXECUTION_" + j + "/FUN.txt", numberOfObjectives); hyperheuristicMeanHypervolumes[i * heuristicFunctions.length + k] += hyperheuristicHypervolumes[i * heuristicFunctions.length + k][j]; } } } for (int i = 0; i < mecbaMeanHypervolume.length; i++) { mecbaMeanHypervolume[i] /= (double) EXECUTIONS; } for (int i = 0; i < hyperheuristicMeanHypervolumes.length; i++) { hyperheuristicMeanHypervolumes[i] /= (double) EXECUTIONS; } double maxMean = Double.NEGATIVE_INFINITY; String maxHeuristic = "NULL"; for (int i = 0; i < mecbaMeanHypervolume.length; i++) { double mean = mecbaMeanHypervolume[i]; if (mean > maxMean) { maxMean = mean; maxHeuristic = algorithms[i]; } } for (int i = 0; i < hyperheuristicMeanHypervolumes.length; i++) { double hyperheuristicMeanHypervolume = hyperheuristicMeanHypervolumes[i]; if (hyperheuristicMeanHypervolume > maxMean) { maxMean = hyperheuristicMeanHypervolume; maxHeuristic = algorithms[i / heuristicFunctions.length] + "-" + heuristicFunctions[i % heuristicFunctions.length]; } } HashMap<String, double[]> values = new HashMap<>(); for (int i = 0; i < algorithms.length; i++) { String algorithm = algorithms[i]; values.put(algorithm, mecbaHypervolumes[i]); } for (int i = 0; i < hyperheuristicHypervolumes.length; i++) { double[] hyperheuristicHypervolume = 
hyperheuristicHypervolumes[i]; String heuristicFunction = heuristicFunctions[i % heuristicFunctions.length]; String algorithm = algorithms[i / heuristicFunctions.length]; values.put(algorithm + "-" + heuristicFunction, hyperheuristicHypervolume); } HashMap<String, HashMap<String, Boolean>> result = KruskalWallisTest.test(values); tableString.append("\t\t" + problem.replaceAll("\\_", "\\\\_")); for (int i = 0; i < algorithms.length; i++) { String algorithm = algorithms[i]; tableString.append(" & "); if (algorithm.equals(maxHeuristic) || !result.get(algorithm).get(maxHeuristic)) { tableString.append("\\textbf{"); } tableString.append(decimalFormatter.format(mecbaMeanHypervolume[i])); tableString.append(" (") .append(decimalFormatter.format(standardDeviation.evaluate(mecbaHypervolumes[i]))) .append(")"); if (algorithm.equals(maxHeuristic) || !result.get(algorithm).get(maxHeuristic)) { tableString.append("}"); } for (int j = 0; j < heuristicFunctions.length; j++) { String heuristicFunction = algorithm + "-" + heuristicFunctions[j]; tableString.append(" & "); if (heuristicFunction.equals(maxHeuristic) || !result.get(heuristicFunction).get(maxHeuristic)) { tableString.append("\\textbf{"); } tableString.append(decimalFormatter .format(hyperheuristicMeanHypervolumes[i * heuristicFunctions.length + j])); tableString.append(" (") .append(decimalFormatter.format(standardDeviation.evaluate( hyperheuristicHypervolumes[i * heuristicFunctions.length + j]))) .append(")"); if (heuristicFunction.equals(maxHeuristic) || !result.get(heuristicFunction).get(maxHeuristic)) { tableString.append("}"); } } } tableString.append("\\\\\n"); } tableString.append("\t\t\\bottomrule\n"); tableString.append("\t\\end{tabulary}\n"); tableString.append("\\end{table}\n"); } tableString.append("\n" + "\\end{landscape}\n" + "\n" + "\\end{document}\n"); fileWriter.write(tableString.toString().replaceAll("ChoiceFunction", "CF") .replaceAll("MultiArmedBandit", "MAB")); } }
From source file:gnu.trove.map.custom_hash.TObjectByteCustomHashMap.java
/**
 * Creates a new <code>TObjectByteCustomHashMap</code> that contains the entries
 * in the map passed to it.
 *
 * @param strategy the hashing strategy used to hash and compare keys.
 * @param map the <tt>TObjectByteMap</tt> to be copied.
 */
public TObjectByteCustomHashMap(HashingStrategy<K> strategy, TObjectByteMap<K> map) {
    this(strategy, map.size(), 0.5f, map.getNoEntryValue());
    // When copying another custom hash map, mirror its configuration so the
    // copy behaves identically: load factor, no-entry sentinel, and strategy.
    if (map instanceof TObjectByteCustomHashMap) {
        TObjectByteCustomHashMap hashmap = (TObjectByteCustomHashMap) map;
        this._loadFactor = hashmap._loadFactor;
        this.no_entry_value = hashmap.no_entry_value;
        this.strategy = hashmap.strategy;
        //noinspection RedundantCast
        if (this.no_entry_value != (byte) 0) {
            // Non-zero sentinel: pre-fill value slots so empty entries report
            // the sentinel instead of the default 0.
            Arrays.fill(_values, this.no_entry_value);
        }
        // Re-initialize the internal tables with the copied load factor.
        setUp((int) Math.ceil(DEFAULT_CAPACITY / _loadFactor));
    }
    putAll(map);
}
From source file:gnu.trove.map.custom_hash.TObjectCharCustomHashMap.java
/**
 * Creates a new <code>TObjectCharCustomHashMap</code> that contains the entries
 * in the map passed to it.
 *
 * @param strategy the hashing strategy used to hash and compare keys.
 * @param map the <tt>TObjectCharMap</tt> to be copied.
 */
public TObjectCharCustomHashMap(HashingStrategy<K> strategy, TObjectCharMap<K> map) {
    this(strategy, map.size(), 0.5f, map.getNoEntryValue());
    // When copying another custom hash map, mirror its configuration so the
    // copy behaves identically: load factor, no-entry sentinel, and strategy.
    if (map instanceof TObjectCharCustomHashMap) {
        TObjectCharCustomHashMap hashmap = (TObjectCharCustomHashMap) map;
        this._loadFactor = hashmap._loadFactor;
        this.no_entry_value = hashmap.no_entry_value;
        this.strategy = hashmap.strategy;
        //noinspection RedundantCast
        if (this.no_entry_value != (char) 0) {
            // Non-zero sentinel: pre-fill value slots so empty entries report
            // the sentinel instead of the default '\0'.
            Arrays.fill(_values, this.no_entry_value);
        }
        // Re-initialize the internal tables with the copied load factor.
        setUp((int) Math.ceil(DEFAULT_CAPACITY / _loadFactor));
    }
    putAll(map);
}
From source file:org.cryptomator.crypto.engine.impl.CryptorImpl.java
/**
 * Serializes this cryptor's master keys into a JSON masterkey file, wrapped
 * with a key-encryption key (KEK) derived from the given passphrase via scrypt.
 *
 * @param passphrase the user passphrase used to derive the KEK.
 * @return the JSON-encoded keyfile bytes.
 * @throws IllegalArgumentException if the keyfile cannot be serialized to JSON.
 */
@Override
public byte[] writeKeysToMasterkeyFile(CharSequence passphrase) {
    // Fresh random salt per written keyfile.
    final byte[] scryptSalt = new byte[SCRYPT_SALT_LENGTH];
    randomSource.nextBytes(scryptSalt);
    final byte[] kekBytes = Scrypt.scrypt(passphrase, scryptSalt, SCRYPT_COST_PARAM, SCRYPT_BLOCK_SIZE,
            KEYLENGTH_IN_BYTES);
    final byte[] wrappedEncryptionKey;
    final byte[] wrappedMacKey;
    try {
        final SecretKey kek = new SecretKeySpec(kekBytes, ENCRYPTION_ALG);
        wrappedEncryptionKey = AesKeyWrap.wrap(kek, encryptionKey);
        wrappedMacKey = AesKeyWrap.wrap(kek, macKey);
    } finally {
        // Zero the raw KEK bytes as soon as wrapping is done so the derived
        // key material does not linger in memory.
        Arrays.fill(kekBytes, (byte) 0x00);
    }
    // MAC over the vault version so tampering with the stored version field is detectable.
    final Mac mac = new ThreadLocalMac(macKey, MAC_ALG).get();
    final byte[] versionMac = mac
            .doFinal(ByteBuffer.allocate(Integer.BYTES).putInt(CURRENT_VAULT_VERSION).array());
    final KeyFile keyfile = new KeyFile();
    keyfile.setVersion(CURRENT_VAULT_VERSION);
    keyfile.setScryptSalt(scryptSalt);
    keyfile.setScryptCostParam(SCRYPT_COST_PARAM);
    keyfile.setScryptBlockSize(SCRYPT_BLOCK_SIZE);
    keyfile.setEncryptionMasterKey(wrappedEncryptionKey);
    keyfile.setMacMasterKey(wrappedMacKey);
    keyfile.setVersionMac(versionMac);
    try {
        final ObjectMapper om = new ObjectMapper();
        return om.writeValueAsBytes(keyfile);
    } catch (JsonProcessingException e) {
        throw new IllegalArgumentException("Unable to create JSON from " + keyfile, e);
    }
}
From source file:investiagenofx2.view.InvestiaGenOFXController.java
/**
 * Initializes the controller class: restores persisted settings into the
 * controls and wires up combo-box, date-picker and focus-loss handlers.
 *
 * @param url the location used to resolve relative paths (unused here).
 * @param rb  the resources used to localize the root object (unused here).
 */
@Override
public void initialize(URL url, ResourceBundle rb) {
    // Restore last-used values from the persisted application properties.
    txt_investiaURL.setText(PropertiesInit.getInvestiaURL());
    dtp_lastDate.setValue(LocalDate.parse(PropertiesInit.getLastGenUsedDate()));
    String[] clientNums = PropertiesInit.getClientNumList().split(",");
    for (String clientNum : clientNums) {
        if (!clientNum.trim().isEmpty()) {
            cbo_clientNum.getItems().add(clientNum.trim());
        }
    }
    // -1 marks every account slot as "not yet mapped to a local account".
    Arrays.fill(linkAccountToLocalAccountIndex, -1);
    resetControls();
    // Pressing DELETE in the editable combo box removes the selected entry.
    cbo_clientNum.getEditor().addEventFilter(KeyEvent.KEY_PRESSED, event -> {
        if (event.getCode() == KeyCode.DELETE) {
            cbo_clientNum.getItems().remove(cbo_clientNum.getValue());
            event.consume();
        }
    });
    // Converter keeps the date picker's text in yyyy-MM-dd form both ways.
    dtp_lastDate.setConverter(new StringConverter<LocalDate>() {
        final String pattern = "yyyy-MM-dd";
        final DateTimeFormatter dateFormatter = DateTimeFormatter.ofPattern(pattern);
        {
            dtp_lastDate.setPromptText(pattern.toLowerCase());
        }

        @Override
        public String toString(LocalDate date) {
            if (date != null) {
                return dateFormatter.format(date);
            } else {
                return "";
            }
        }

        @Override
        public LocalDate fromString(String string) {
            if (string != null && !string.isEmpty()) {
                return LocalDate.parse(string, dateFormatter);
            } else {
                return null;
            }
        }
    });
    // This deals with the bug located here where the datepicker value is not updated on focus lost
    // https://bugs.openjdk.java.net/browse/JDK-8092295?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
    dtp_lastDate.focusedProperty().addListener(new ChangeListener<Boolean>() {
        @Override
        public void changed(ObservableValue<? extends Boolean> observable, Boolean oldValue,
                Boolean newValue) {
            if (!newValue) {
                // Commit whatever text is in the editor when focus leaves the field.
                dtp_lastDate
                        .setValue(dtp_lastDate.getConverter().fromString(dtp_lastDate.getEditor().getText()));
            }
        }
    });
}
From source file:com.genericworkflownodes.knime.generic_node.GenericKnimeNodeModel.java
/**
 * Creates a list of output port types for the nodes.
 *
 * @param ports The port list from which the output ports should be generated.
 * @return A list of output port types for the nodes.
 */
private static PortType[] createOPOs(List<Port> ports) {
    final int portCount = ports.size();
    PortType[] result = new PortType[portCount];
    for (int idx = 0; idx < portCount; idx++) {
        // Optional ports use the optional port type; all others are URI ports.
        result[idx] = ports.get(idx).isOptional() ? OPTIONAL_PORT_TYPE : IURIPortObject.TYPE;
    }
    return result;
}
From source file:gnu.trove.map.custom_hash.TObjectFloatCustomHashMap.java
/**
 * Creates a new <code>TObjectFloatCustomHashMap</code> that contains the entries
 * in the map passed to it.
 *
 * @param strategy the hashing strategy used to hash and compare keys.
 * @param map the <tt>TObjectFloatMap</tt> to be copied.
 */
public TObjectFloatCustomHashMap(HashingStrategy<K> strategy, TObjectFloatMap<K> map) {
    this(strategy, map.size(), 0.5f, map.getNoEntryValue());
    // When copying another custom hash map, mirror its configuration so the
    // copy behaves identically: load factor, no-entry sentinel, and strategy.
    if (map instanceof TObjectFloatCustomHashMap) {
        TObjectFloatCustomHashMap hashmap = (TObjectFloatCustomHashMap) map;
        this._loadFactor = hashmap._loadFactor;
        this.no_entry_value = hashmap.no_entry_value;
        this.strategy = hashmap.strategy;
        //noinspection RedundantCast
        if (this.no_entry_value != (float) 0) {
            // Non-zero sentinel: pre-fill value slots so empty entries report
            // the sentinel instead of the default 0.0f.
            Arrays.fill(_values, this.no_entry_value);
        }
        // Re-initialize the internal tables with the copied load factor.
        setUp((int) Math.ceil(DEFAULT_CAPACITY / _loadFactor));
    }
    putAll(map);
}
From source file:gnu.trove.map.custom_hash.TObjectDoubleCustomHashMap.java
/**
 * Creates a new <code>TObjectDoubleCustomHashMap</code> that contains the entries
 * in the map passed to it.
 *
 * @param strategy the hashing strategy used to hash and compare keys.
 * @param map the <tt>TObjectDoubleMap</tt> to be copied.
 */
public TObjectDoubleCustomHashMap(HashingStrategy<K> strategy, TObjectDoubleMap<K> map) {
    this(strategy, map.size(), 0.5f, map.getNoEntryValue());
    // When copying another custom hash map, mirror its configuration so the
    // copy behaves identically: load factor, no-entry sentinel, and strategy.
    if (map instanceof TObjectDoubleCustomHashMap) {
        TObjectDoubleCustomHashMap hashmap = (TObjectDoubleCustomHashMap) map;
        this._loadFactor = hashmap._loadFactor;
        this.no_entry_value = hashmap.no_entry_value;
        this.strategy = hashmap.strategy;
        //noinspection RedundantCast
        if (this.no_entry_value != (double) 0) {
            // Non-zero sentinel: pre-fill value slots so empty entries report
            // the sentinel instead of the default 0.0.
            Arrays.fill(_values, this.no_entry_value);
        }
        // Re-initialize the internal tables with the copied load factor.
        setUp((int) Math.ceil(DEFAULT_CAPACITY / _loadFactor));
    }
    putAll(map);
}
From source file:es.upv.riromu.arbre.main.MainActivity.java
/**
 * Activity entry point: resets UI state, kicks off the asynchronous OpenCV
 * load, shows the intro layout and builds a RangeSeekBar from the persisted
 * H/S/V range preferences.
 *
 * @param savedInstanceState previously saved state, or null on first launch.
 */
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Reset per-flag state and colour values before anything is shown.
    Arrays.fill(state, false);
    Arrays.fill(colours, 255);
    compressRatio = 100;
    // state[TREAT_IMAGE]=false;
    // image = BitmapFactory.decodeResource(getResources(), R.drawable.platanus_hispanica);
    Log.i(TAG, "Trying to load OpenCV library");
    // Asynchronous load via OpenCV Manager; mLoaderCallback is invoked on completion.
    if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_6, this, mLoaderCallback)) {
        Log.e(TAG, "Cannot connect to OpenCV Manager");
    }
    setContentView(R.layout.intro_activity);
    ImageView imv = (ImageView) findViewById(R.id.image_intro);
    imv.setImageDrawable(getResources().getDrawable(R.drawable.platanus_hispanica));
    // create RangeSeekBar as Integer range between 0 and 180
    SharedPreferences sharedPref = PreferenceManager.getDefaultSharedPreferences(this);
    // "rangepreference" selects which HSV channel (H, S or V) the bar controls;
    // min/max defaults differ per channel (H: 60-130, S/V: 0-255).
    String rangepreference = (String) sharedPref.getString("rangepreference", "H");
    RangeSeekBar<Integer> seekBar = null;
    if (rangepreference.equals("H"))
        seekBar = new RangeSeekBar<Integer>(Integer.valueOf(sharedPref.getString("minH_preference", "60")),
                Integer.valueOf(sharedPref.getString("maxH_preference", "130")), this);
    if (rangepreference.equals("S"))
        seekBar = new RangeSeekBar<Integer>(Integer.valueOf(sharedPref.getString("minS_preference", "0")),
                Integer.valueOf(sharedPref.getString("maxS_preference", "255")), this);
    if (rangepreference.equals("V"))
        seekBar = new RangeSeekBar<Integer>(Integer.valueOf(sharedPref.getString("minV_preference", "0")),
                Integer.valueOf(sharedPref.getString("maxV_preference", "255")), this);
    //RangeSeekBar<Integer> seekBar = new RangeSeekBar<Integer>(MIN_TH, MAX_TH, this);
    seekBar.setId(R.id.rangeseekbar);
    seekBar.setOnRangeSeekBarChangeListener(new RangeSeekBar.OnRangeSeekBarChangeListener<Integer>() {
        @Override
        public void onRangeSeekBarValuesChanged(RangeSeekBar<?> bar, Integer minValue, Integer maxValue) {
            rangeSeekBarChanged(minValue, maxValue);
        }
    });
    setVisibility();
    // add RangeSeekBar to pre-defined layout
    ViewGroup layout = (ViewGroup) findViewById(R.id.seekbarholder);
    layout.addView(seekBar);
    //locationManager = (LocationManager) getSystemService(Context.LOCATION_SERVICE);
}
From source file:com.opendoorlogistics.core.geometry.ImportShapefile.java
/**
 * Import the shapefile. All geometry is transformed into WGS84.
 *
 * @param file the shapefile (or render-optimised geometry file) to import;
 *        resolved relative to the configured shapefiles directory.
 * @param isLinkedGeometry if true, table cells store {@link ShapefileLink}
 *        references instead of loaded geometry.
 * @param ds optional datastore to populate with one table per feature type;
 *        may be null, in which case only geometry is collected.
 * @param returnGeometry if true, the returned map is populated with the
 *        (transformed) geometry keyed by shapefile link.
 * @return link-to-geometry map when {@code returnGeometry} is true, otherwise null
 *         (empty map if the file cannot be resolved).
 */
@SuppressWarnings("deprecation")
public static HashMap<ShapefileLink, ODLGeom> importShapefile(File file, boolean isLinkedGeometry,
        ODLDatastoreAlterable<? extends ODLTableAlterable> ds, boolean returnGeometry) {
    Spatial.initSpatial();
    file = RelativeFiles.validateRelativeFiles(file.getPath(), AppConstants.SHAPEFILES_DIRECTORY);
    if (file == null) {
        return new HashMap<>();
    }
    // check if we're actually opening a render optimised geometry file... do something if we are?
    String ext = FilenameUtils.getExtension(file.getAbsolutePath());
    boolean isRog = Strings.equalsStd(ext, RogReaderUtils.RENDER_GEOMETRY_FILE_EXT);
    File originalFile = file;
    List<ODLRenderOptimisedGeom> rogs = null;
    if (isRog) {
        // ROG input: load optimised geometries, then redirect to the sibling .shp
        // for attribute/schema reading.
        rogs = new LargeList<>();
        RogSingleton.singleton().createLoader(file, rogs);
        // get the shapefile from the main one
        String shpFile = FilenameUtils.removeExtension(file.getPath()) + ".shp";
        file = new File(shpFile);
    }
    SimpleFeatureIterator it = null;
    DataStore shapefile = null;
    // create return object (null signals "geometry not requested")
    HashMap<ShapefileLink, ODLGeom> ret = null;
    if (returnGeometry) {
        ret = new HashMap<>();
    }
    try {
        shapefile = openDataStore(file);
        if (shapefile == null) {
            throw new RuntimeException("Could not open shapefile: " + file);
        }
        // get the linkfile using the *original file*, not the potentially redirected file
        String linkFile = RelativeFiles.getFilenameToSaveInLink(originalFile, AppConstants.SHAPEFILES_DIRECTORY);
        for (String type : shapefile.getTypeNames()) {
            // make table (one per feature type), uniquifying the name on clash
            ODLTableAlterable table = null;
            if (ds != null) {
                String tableName = type;
                if (TableUtils.findTable(ds, tableName) != null) {
                    tableName = TableUtils.getUniqueNumberedTableName(type, ds);
                }
                table = ds.createTable(type, -1);
            }
            // add columns for each usable feature attribute; mapped[i] == -1 means
            // attribute i has no corresponding table column.
            SimpleFeatureType schema = shapefile.getSchema(type);
            int nAttrib = schema.getAttributeCount();
            int[] mapped = new int[nAttrib];
            Arrays.fill(mapped, -1);
            if (ds != null) {
                for (int i = 0; i < nAttrib; i++) {
                    AttributeType attributeType = schema.getType(i);
                    Class<?> binding = attributeType.getBinding();
                    ODLColumnType colType = BeanTypeConversion.getInternalType(binding);
                    if (colType != null) {
                        String attributeName = schema.getDescriptor(i).getLocalName();
                        if (table.addColumn(i, attributeName, colType, 0) != -1) {
                            mapped[i] = table.getColumnCount() - 1;
                            if (colType == ODLColumnType.GEOM) {
                                table.setColumnTags(mapped[i], Strings.toTreeSet(PredefinedTags.GEOMETRY));
                            }
                        }
                    }
                }
            }
            // get coord transform to turn into wgs84 long-lat
            MathTransform toWGS84 = getTransformToWGS84(shapefile, type);
            SimpleFeatureSource source = shapefile.getFeatureSource(type);
            SimpleFeatureCollection collection = source.getFeatures();
            // parse all features recording all attributes, including geometry
            it = collection.features();
            int objectIndex = 0;
            while (it.hasNext()) {
                SimpleFeature feature = it.next();
                //System.out.println(feature.getID());
                if (SimpleFeature.class.isInstance(feature)) {
                    SimpleFeature sf = (SimpleFeature) feature;
                    // create row if we're outputting to a datastore
                    int row = -1;
                    if (ds != null) {
                        row = table.createEmptyRow(-1);
                    }
                    for (int i = 0; i < nAttrib; i++) {
                        Object value = sf.getAttribute(i);
                        // process geometry
                        ShapefileLink link = null;
                        if (value != null && Geometry.class.isInstance(value)) {
                            if (!isLinkedGeometry || ret != null) {
                                if (rogs != null) {
                                    // ROG mode: use the pre-loaded optimised geometry.
                                    value = rogs.get(objectIndex);
                                } else {
                                    // Transform the geometry to wgs84 if we need it
                                    value = JTS.transform((Geometry) value, toWGS84);
                                    value = new ODLLoadedGeometry((Geometry) value);
                                }
                            } else {
                                // Geometry not needed
                                value = null;
                            }
                            // Create geometry link
                            link = new ShapefileLink(linkFile, type, sf.getID());
                            // Save the transformed geometry if flagged
                            if (ret != null) {
                                ret.put(link, (ODLGeom) value);
                            }
                            // If we're using linked geometry the value for the table is the link
                            if (isLinkedGeometry) {
                                value = new ODLShapefileLinkGeom(link);
                            }
                        }
                        // save to table if mapped
                        int col = mapped[i];
                        if (col != -1) {
                            ODLColumnType odlType = table.getColumnType(col);
                            value = ColumnValueProcessor.convertToMe(odlType, value);
                            table.setValueAt(value, row, col);
                        }
                    }
                    objectIndex++;
                } else {
                    throw new RuntimeException();
                }
            }
        }
    } catch (Throwable e) {
        throw new RuntimeException(e);
    } finally {
        // Always release GeoTools resources, even on failure.
        if (it != null) {
            it.close();
        }
        if (shapefile != null) {
            shapefile.dispose();
        }
    }
    return ret;
}