List of usage examples for java.lang.Double.MIN_VALUE

Field signature: public static final double MIN_VALUE

MIN_VALUE is the smallest positive nonzero value of type double, 2^-1074 (printed as 4.9E-324). Each entry below shows how a real project uses this constant.
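Before the project examples, here is a minimal standalone sketch (not taken from any of the sources below) demonstrating what Double.MIN_VALUE actually is: the smallest positive double, not the most negative one. The most negative finite double is -Double.MAX_VALUE.

public class DoubleMinValueDemo {
    public static void main(String[] args) {
        // Smallest positive nonzero double, 2^-1074, printed as 4.9E-324
        System.out.println(Double.MIN_VALUE);

        // MIN_VALUE is greater than zero...
        System.out.println(Double.MIN_VALUE > 0);          // true

        // ...so every negative value is below it
        System.out.println(-1.0 < Double.MIN_VALUE);       // true

        // Candidates for a "lowest possible" sentinel instead of MIN_VALUE
        System.out.println(-Double.MAX_VALUE);             // most negative finite double
        System.out.println(Double.NEGATIVE_INFINITY);      // below every finite double
    }
}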
From source file:com.versobit.weatherdoge.WeatherUtil.java
private static WeatherResult getWeatherFromOWM(double latitude, double longitude, String location) {
    try {
        String query;
        if (latitude == Double.MIN_VALUE && longitude == Double.MIN_VALUE) {
            if (location == null) {
                return new WeatherResult(null, WeatherResult.ERROR_THROWABLE, "No valid location parameters.",
                        new IllegalArgumentException());
            }
            query = "q=" + URLEncoder.encode(location, "UTF-8");
        } else {
            query = "lat=" + URLEncoder.encode(String.valueOf(latitude), "UTF-8") + "&lon="
                    + URLEncoder.encode(String.valueOf(longitude), "UTF-8");
        }
        query += "&APPID=" + URLEncoder.encode(BuildConfig.OWM_APPID, "UTF-8");
        URL url = new URL("http://api.openweathermap.org/data/2.5/weather?" + query);
        HttpURLConnection connection = (HttpURLConnection) url.openConnection();
        try {
            JSONObject response = new JSONObject(IOUtils.toString(connection.getInputStream()));
            if (response.getInt("cod") != HttpURLConnection.HTTP_OK) {
                // OWM has HTTP error codes that are passed through an API field, the actual HTTP
                // error code is always 200...
                return new WeatherResult(null, WeatherResult.ERROR_API,
                        response.getString("cod") + ": " + response.getString("message"), null);
            }
            JSONObject weather = response.getJSONArray("weather").getJSONObject(0);
            JSONObject main = response.getJSONObject("main");
            double temp = main.getDouble("temp") - 273.15d;
            String condition = WordUtils.capitalize(weather.getString("description").trim());
            String image = weather.getString("icon");
            if (location == null || location.isEmpty()) {
                location = response.getString("name");
            }
            return new WeatherResult(new WeatherData(temp, condition, image, latitude, longitude, location,
                    new Date(), Source.OPEN_WEATHER_MAP), WeatherResult.ERROR_NONE, null, null);
        } finally {
            connection.disconnect();
        }
    } catch (Exception ex) {
        return new WeatherResult(null, WeatherResult.ERROR_THROWABLE, ex.getMessage(), ex);
    }
}
From source file:org.apache.hadoop.hive.hbase.HBaseTestSetup.java
private void createHBaseTable() throws IOException {
    final String HBASE_TABLE_NAME = "HiveExternalTable";
    HTableDescriptor htableDesc = new HTableDescriptor(HBASE_TABLE_NAME.getBytes());
    HColumnDescriptor hcolDesc = new HColumnDescriptor("cf".getBytes());
    htableDesc.addFamily(hcolDesc);

    boolean[] booleans = new boolean[] { true, false, true };
    byte[] bytes = new byte[] { Byte.MIN_VALUE, -1, Byte.MAX_VALUE };
    short[] shorts = new short[] { Short.MIN_VALUE, -1, Short.MAX_VALUE };
    int[] ints = new int[] { Integer.MIN_VALUE, -1, Integer.MAX_VALUE };
    long[] longs = new long[] { Long.MIN_VALUE, -1, Long.MAX_VALUE };
    String[] strings = new String[] { "Hadoop, HBase,", "Hive", "Test Strings" };
    float[] floats = new float[] { Float.MIN_VALUE, -1.0F, Float.MAX_VALUE };
    double[] doubles = new double[] { Double.MIN_VALUE, -1.0, Double.MAX_VALUE };

    HBaseAdmin hbaseAdmin = null;
    HTableInterface htable = null;
    try {
        hbaseAdmin = new HBaseAdmin(hbaseConn.getConfiguration());
        if (Arrays.asList(hbaseAdmin.listTables()).contains(htableDesc)) {
            // if table is already in there, don't recreate.
            return;
        }
        hbaseAdmin.createTable(htableDesc);
        htable = hbaseConn.getTable(HBASE_TABLE_NAME);

        // data
        Put[] puts = new Put[] { new Put("key-1".getBytes()), new Put("key-2".getBytes()),
                new Put("key-3".getBytes()) };

        // store data
        for (int i = 0; i < puts.length; i++) {
            puts[i].add("cf".getBytes(), "cq-boolean".getBytes(), Bytes.toBytes(booleans[i]));
            puts[i].add("cf".getBytes(), "cq-byte".getBytes(), new byte[] { bytes[i] });
            puts[i].add("cf".getBytes(), "cq-short".getBytes(), Bytes.toBytes(shorts[i]));
            puts[i].add("cf".getBytes(), "cq-int".getBytes(), Bytes.toBytes(ints[i]));
            puts[i].add("cf".getBytes(), "cq-long".getBytes(), Bytes.toBytes(longs[i]));
            puts[i].add("cf".getBytes(), "cq-string".getBytes(), Bytes.toBytes(strings[i]));
            puts[i].add("cf".getBytes(), "cq-float".getBytes(), Bytes.toBytes(floats[i]));
            puts[i].add("cf".getBytes(), "cq-double".getBytes(), Bytes.toBytes(doubles[i]));
            htable.put(puts[i]);
        }
    } finally {
        if (htable != null)
            htable.close();
        if (hbaseAdmin != null)
            hbaseAdmin.close();
    }
}
From source file:org.apache.camel.dataformat.bindy.BindyAbstractFactory.java
public static Object getDefaultValueForPrimitive(Class<?> clazz) throws Exception {
    if (clazz == byte.class) {
        return Byte.MIN_VALUE;
    } else if (clazz == short.class) {
        return Short.MIN_VALUE;
    } else if (clazz == int.class) {
        return Integer.MIN_VALUE;
    } else if (clazz == long.class) {
        return Long.MIN_VALUE;
    } else if (clazz == float.class) {
        return Float.MIN_VALUE;
    } else if (clazz == double.class) {
        return Double.MIN_VALUE;
    } else if (clazz == char.class) {
        return Character.MIN_VALUE;
    } else if (clazz == boolean.class) {
        return false;
    } else {
        return null;
    }
}
From source file:IK.AbstractArmature.java
public void setDefaultDampening(double damp) {
    // Math.abs(Double.MIN_VALUE) is 4.9E-324, so dampening is clamped to at most that tiny value.
    this.dampening = Math.min(Math.abs(Double.MIN_VALUE), Math.abs(damp));
}
From source file:nl.uva.sne.classifiers.CosineSimilarity.java
@Override
public void saveClusterFile(String modelDir, String dataDirPath, String filePath)
        throws IOException, ParseException {
    try {
        Map<String, Map<String, Double>> classesMap = buildClassesMap(modelDir, dataDirPath);
        double minScore = Double.MAX_VALUE;
        double maxScore = Double.MIN_VALUE;
        for (String docName : classesMap.keySet()) {
            StringBuilder line = new StringBuilder();
            line.append(docName).append(",");
            Map<String, Double> res = classesMap.get(docName);
            Set<String> classNames = res.keySet();
            for (String cName : classNames) {
                Double score = res.get(cName);
                if (score > maxScore) {
                    maxScore = score;
                } else if (score < minScore) {
                    minScore = score;
                }
            }
        }
        StringBuilder header = new StringBuilder();
        boolean headerSet = false;
        header.append("docName").append(",");
        try (PrintWriter out = new PrintWriter(filePath + File.separator + "result.csv")) {
            for (String docName : classesMap.keySet()) {
                StringBuilder line = new StringBuilder();
                line.append(docName).append(",");
                Map<String, Double> res = classesMap.get(docName);
                Set<String> classNames = res.keySet();
                for (String cName : classNames) {
                    if (!headerSet) {
                        header.append(cName).append(",");
                    }
                    Double score = res.get(cName);
                    double scaledValue = 2 + (score - minScore) * (5 - 2) / (maxScore - minScore);
                    // double scaledValue = (((maxScore - minScore) * (score - 2.0)) / (5.0 - 2.0)) + minScore;
                    line.append(Math.round(scaledValue)).append(",");
                }
                if (!headerSet) {
                    header.deleteCharAt(header.length() - 1);
                    header.setLength(header.length());
                    headerSet = true;
                    out.print(header + "\n");
                }
                line.deleteCharAt(line.length() - 1);
                line.setLength(line.length());
                // System.err.println(line);
                out.print(line + "\n");
            }
        }
    } catch (JWNLException ex) {
        Logger.getLogger(CosineSimilarity.class.getName()).log(Level.SEVERE, null, ex);
    }
}
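Several entries on this page (including this one and the Shifu reducer below) seed a running maximum with Double.MIN_VALUE. Because MIN_VALUE is a tiny positive number, that initialization only behaves as a true maximum sentinel while every score is positive, and the else-if above means a single score never updates both bounds in the same pass. A short illustrative helper (hypothetical, not part of the original CosineSimilarity class) showing the more defensive pattern:

// Hypothetical helper: scan scores for min and max using infinities as sentinels,
// so zero and negative values are handled and one value can update both bounds.
static double[] minMax(double[] scores) {
    double min = Double.POSITIVE_INFINITY;
    double max = Double.NEGATIVE_INFINITY;
    for (double score : scores) {
        if (score > max) {
            max = score;
        }
        if (score < min) {   // independent check, not "else if"
            min = score;
        }
    }
    return new double[] { min, max };
}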
From source file:ml.shifu.shifu.core.binning.UpdateBinningInfoReducer.java
@Override protected void reduce(IntWritable key, Iterable<BinningInfoWritable> values, Context context) throws IOException, InterruptedException { long start = System.currentTimeMillis(); double sum = 0d; double squaredSum = 0d; double tripleSum = 0d; double quarticSum = 0d; double p25th = 0d; double median = 0d; double p75th = 0d; long count = 0L, missingCount = 0L; double min = Double.MAX_VALUE, max = Double.MIN_VALUE; List<Double> binBoundaryList = null; List<String> binCategories = null; long[] binCountPos = null; long[] binCountNeg = null; double[] binWeightPos = null; double[] binWeightNeg = null; long[] binCountTotal = null; int columnConfigIndex = key.get() >= this.columnConfigList.size() ? key.get() % this.columnConfigList.size() : key.get();/*w ww.ja v a2s .c om*/ ColumnConfig columnConfig = this.columnConfigList.get(columnConfigIndex); HyperLogLogPlus hyperLogLogPlus = null; Set<String> fis = new HashSet<String>(); long totalCount = 0, invalidCount = 0, validNumCount = 0; int binSize = 0; for (BinningInfoWritable info : values) { if (info.isEmpty()) { // mapper has no stats, skip it continue; } CountAndFrequentItemsWritable cfiw = info.getCfiw(); totalCount += cfiw.getCount(); invalidCount += cfiw.getInvalidCount(); validNumCount += cfiw.getValidNumCount(); fis.addAll(cfiw.getFrequetItems()); if (hyperLogLogPlus == null) { hyperLogLogPlus = HyperLogLogPlus.Builder.build(cfiw.getHyperBytes()); } else { try { hyperLogLogPlus = (HyperLogLogPlus) hyperLogLogPlus .merge(HyperLogLogPlus.Builder.build(cfiw.getHyperBytes())); } catch (CardinalityMergeException e) { throw new RuntimeException(e); } } if (columnConfig.isHybrid() && binBoundaryList == null && binCategories == null) { binBoundaryList = info.getBinBoundaries(); binCategories = info.getBinCategories(); binSize = binBoundaryList.size() + binCategories.size(); binCountPos = new long[binSize + 1]; binCountNeg = new long[binSize + 1]; binWeightPos = new double[binSize + 1]; binWeightNeg = new double[binSize + 1]; binCountTotal = new long[binSize + 1]; } else if (columnConfig.isNumerical() && binBoundaryList == null) { binBoundaryList = info.getBinBoundaries(); binSize = binBoundaryList.size(); binCountPos = new long[binSize + 1]; binCountNeg = new long[binSize + 1]; binWeightPos = new double[binSize + 1]; binWeightNeg = new double[binSize + 1]; binCountTotal = new long[binSize + 1]; } else if (columnConfig.isCategorical() && binCategories == null) { binCategories = info.getBinCategories(); binSize = binCategories.size(); binCountPos = new long[binSize + 1]; binCountNeg = new long[binSize + 1]; binWeightPos = new double[binSize + 1]; binWeightNeg = new double[binSize + 1]; binCountTotal = new long[binSize + 1]; } count += info.getTotalCount(); missingCount += info.getMissingCount(); // for numeric, such sums are OK, for categorical, such values are all 0, should be updated by using // binCountPos and binCountNeg sum += info.getSum(); squaredSum += info.getSquaredSum(); tripleSum += info.getTripleSum(); quarticSum += info.getQuarticSum(); if (Double.compare(max, info.getMax()) < 0) { max = info.getMax(); } if (Double.compare(min, info.getMin()) > 0) { min = info.getMin(); } for (int i = 0; i < (binSize + 1); i++) { binCountPos[i] += info.getBinCountPos()[i]; binCountNeg[i] += info.getBinCountNeg()[i]; binWeightPos[i] += info.getBinWeightPos()[i]; binWeightNeg[i] += info.getBinWeightNeg()[i]; binCountTotal[i] += info.getBinCountPos()[i]; binCountTotal[i] += info.getBinCountNeg()[i]; } } if (columnConfig.isNumerical()) { long 
p25Count = count / 4; long medianCount = p25Count * 2; long p75Count = p25Count * 3; p25th = min; median = min; p75th = min; int currentCount = 0; for (int i = 0; i < binBoundaryList.size(); i++) { double left = getCutoffBoundary(binBoundaryList.get(i), max, min); double right = ((i == binBoundaryList.size() - 1) ? max : getCutoffBoundary(binBoundaryList.get(i + 1), max, min)); if (p25Count >= currentCount && p25Count < currentCount + binCountTotal[i]) { p25th = ((p25Count - currentCount) / (double) binCountTotal[i]) * (right - left) + left; } if (medianCount >= currentCount && medianCount < currentCount + binCountTotal[i]) { median = ((medianCount - currentCount) / (double) binCountTotal[i]) * (right - left) + left; } if (p75Count >= currentCount && p75Count < currentCount + binCountTotal[i]) { p75th = ((p75Count - currentCount) / (double) binCountTotal[i]) * (right - left) + left; // when get 75 percentile stop it break; } currentCount += binCountTotal[i]; } LOG.info("Coloumn num is {}, p25 value is {}, median value is {}, p75 value is {}", columnConfig.getColumnNum(), p25th, median, p75th); } LOG.info("Coloumn num is {}, columnType value is {}, cateMaxNumBin is {}, binCategory size is {}", columnConfig.getColumnNum(), columnConfig.getColumnType(), modelConfig.getStats().getCateMaxNumBin(), (CollectionUtils.isNotEmpty(columnConfig.getBinCategory()) ? columnConfig.getBinCategory().size() : 0)); // To merge categorical binning if (columnConfig.isCategorical() && modelConfig.getStats().getCateMaxNumBin() > 0 && CollectionUtils.isNotEmpty(binCategories) && binCategories.size() > modelConfig.getStats().getCateMaxNumBin()) { // only category size large then expected max bin number CateBinningStats cateBinningStats = rebinCategoricalValues( new CateBinningStats(binCategories, binCountPos, binCountNeg, binWeightPos, binWeightNeg)); LOG.info("For variable - {}, {} bins is rebined to {} bins", columnConfig.getColumnName(), binCategories.size(), cateBinningStats.binCategories.size()); binCategories = cateBinningStats.binCategories; binCountPos = cateBinningStats.binCountPos; binCountNeg = cateBinningStats.binCountNeg; binWeightPos = cateBinningStats.binWeightPos; binWeightNeg = cateBinningStats.binWeightNeg; } double[] binPosRate; if (modelConfig.isRegression()) { binPosRate = computePosRate(binCountPos, binCountNeg); } else { // for multiple classfication, use rate of categories to compute a value binPosRate = computeRateForMultiClassfication(binCountPos); } String binBounString = null; if (columnConfig.isHybrid()) { if (binCategories.size() > this.maxCateSize) { LOG.warn("Column {} {} with invalid bin category size.", key.get(), columnConfig.getColumnName(), binCategories.size()); return; } binBounString = binBoundaryList.toString(); binBounString += Constants.HYBRID_BIN_STR_DILIMETER + Base64Utils.base64Encode( "[" + StringUtils.join(binCategories, CalculateStatsUDF.CATEGORY_VAL_SEPARATOR) + "]"); } else if (columnConfig.isCategorical()) { if (binCategories.size() > this.maxCateSize) { LOG.warn("Column {} {} with invalid bin category size.", key.get(), columnConfig.getColumnName(), binCategories.size()); return; } binBounString = Base64Utils.base64Encode( "[" + StringUtils.join(binCategories, CalculateStatsUDF.CATEGORY_VAL_SEPARATOR) + "]"); // recompute such value for categorical variables min = Double.MAX_VALUE; max = Double.MIN_VALUE; sum = 0d; squaredSum = 0d; for (int i = 0; i < binPosRate.length; i++) { if (!Double.isNaN(binPosRate[i])) { if (Double.compare(max, binPosRate[i]) < 0) { max = 
binPosRate[i]; } if (Double.compare(min, binPosRate[i]) > 0) { min = binPosRate[i]; } long binCount = binCountPos[i] + binCountNeg[i]; sum += binPosRate[i] * binCount; double squaredVal = binPosRate[i] * binPosRate[i]; squaredSum += squaredVal * binCount; tripleSum += squaredVal * binPosRate[i] * binCount; quarticSum += squaredVal * squaredVal * binCount; } } } else { if (binBoundaryList.size() == 0) { LOG.warn("Column {} {} with invalid bin boundary size.", key.get(), columnConfig.getColumnName(), binBoundaryList.size()); return; } binBounString = binBoundaryList.toString(); } ColumnMetrics columnCountMetrics = null; ColumnMetrics columnWeightMetrics = null; if (modelConfig.isRegression()) { columnCountMetrics = ColumnStatsCalculator.calculateColumnMetrics(binCountNeg, binCountPos); columnWeightMetrics = ColumnStatsCalculator.calculateColumnMetrics(binWeightNeg, binWeightPos); } // To make it be consistent with SPDT, missingCount is excluded to compute mean, stddev ... long realCount = this.statsExcludeMissingValue ? (count - missingCount) : count; double mean = sum / realCount; double stdDev = Math.sqrt(Math.abs((squaredSum - (sum * sum) / realCount + EPS) / (realCount - 1))); double aStdDev = Math.sqrt(Math.abs((squaredSum - (sum * sum) / realCount + EPS) / realCount)); double skewness = ColumnStatsCalculator.computeSkewness(realCount, mean, aStdDev, sum, squaredSum, tripleSum); double kurtosis = ColumnStatsCalculator.computeKurtosis(realCount, mean, aStdDev, sum, squaredSum, tripleSum, quarticSum); sb.append(key.get()) // column id .append(Constants.DEFAULT_DELIMITER).append(binBounString) // column bins .append(Constants.DEFAULT_DELIMITER).append(Arrays.toString(binCountNeg)) // bin count negative .append(Constants.DEFAULT_DELIMITER).append(Arrays.toString(binCountPos)) // bin count positive .append(Constants.DEFAULT_DELIMITER).append(Arrays.toString(new double[0])) // deprecated .append(Constants.DEFAULT_DELIMITER).append(Arrays.toString(binPosRate)) // bin positive rate .append(Constants.DEFAULT_DELIMITER) .append(columnCountMetrics == null ? "" : df.format(columnCountMetrics.getKs())) // KS .append(Constants.DEFAULT_DELIMITER) .append(columnCountMetrics == null ? "" : df.format(columnCountMetrics.getIv())) // IV .append(Constants.DEFAULT_DELIMITER).append(df.format(max)) // max .append(Constants.DEFAULT_DELIMITER).append(df.format(min)) // min .append(Constants.DEFAULT_DELIMITER).append(df.format(mean)) // mean .append(Constants.DEFAULT_DELIMITER).append(df.format(stdDev)) // standard deviation .append(Constants.DEFAULT_DELIMITER).append(columnConfig.getColumnType().toString()) // column type .append(Constants.DEFAULT_DELIMITER).append(median) // median value ? .append(Constants.DEFAULT_DELIMITER).append(missingCount) // missing count .append(Constants.DEFAULT_DELIMITER).append(count) // count .append(Constants.DEFAULT_DELIMITER).append(missingCount * 1.0d / count) // missing ratio .append(Constants.DEFAULT_DELIMITER).append(Arrays.toString(binWeightNeg)) // bin weighted negative .append(Constants.DEFAULT_DELIMITER).append(Arrays.toString(binWeightPos)) // bin weighted positive .append(Constants.DEFAULT_DELIMITER) .append(columnCountMetrics == null ? "" : columnCountMetrics.getWoe()) // WOE .append(Constants.DEFAULT_DELIMITER) .append(columnWeightMetrics == null ? "" : columnWeightMetrics.getWoe()) // weighted WOE .append(Constants.DEFAULT_DELIMITER) .append(columnWeightMetrics == null ? 
"" : columnWeightMetrics.getKs()) // weighted KS .append(Constants.DEFAULT_DELIMITER) .append(columnWeightMetrics == null ? "" : columnWeightMetrics.getIv()) // weighted IV .append(Constants.DEFAULT_DELIMITER) .append(columnCountMetrics == null ? Arrays.toString(new double[binSize + 1]) : columnCountMetrics.getBinningWoe().toString()) // bin WOE .append(Constants.DEFAULT_DELIMITER) .append(columnWeightMetrics == null ? Arrays.toString(new double[binSize + 1]) : columnWeightMetrics.getBinningWoe().toString()) // bin weighted WOE .append(Constants.DEFAULT_DELIMITER).append(skewness) // skewness .append(Constants.DEFAULT_DELIMITER).append(kurtosis) // kurtosis .append(Constants.DEFAULT_DELIMITER).append(totalCount) // total count .append(Constants.DEFAULT_DELIMITER).append(invalidCount) // invalid count .append(Constants.DEFAULT_DELIMITER).append(validNumCount) // valid num count .append(Constants.DEFAULT_DELIMITER).append(hyperLogLogPlus.cardinality()) // cardinality .append(Constants.DEFAULT_DELIMITER).append(Base64Utils.base64Encode(limitedFrequentItems(fis))) // frequent items .append(Constants.DEFAULT_DELIMITER).append(p25th) // the 25 percentile value .append(Constants.DEFAULT_DELIMITER).append(p75th); outputValue.set(sb.toString()); context.write(NullWritable.get(), outputValue); sb.delete(0, sb.length()); LOG.debug("Time:{}", (System.currentTimeMillis() - start)); }
From source file:android.support.test.espresso.web.model.ModelCodecTest.java
public void testEncodeDecoded_array() {
    Object[] array = new Object[5];
    array[0] = Boolean.TRUE;
    array[1] = null;
    array[2] = Double.MIN_VALUE;
    array[3] = "Hello World";
    array[4] = 2;
    assertEquals(Lists.newArrayList(array), ModelCodec.decode(ModelCodec.encode(array)));
}
From source file:org.bitpipeline.lib.friendlyjson.JSONEntityTest.java
@Test
public void testSerializationStrings() throws JSONMappingException, JSONException {
    Entity orig = createEntity();
    JSONObject json = orig.toJson();
    assertNotNull(json);

    String origJsonStr = orig.toString(4);
    assertNotNull(origJsonStr);

    Entity copy = new Entity(origJsonStr);
    assertNotNull(copy);

    assertEquals(orig.aBoolean, copy.aBoolean);
    assertEquals(orig.aByte, copy.aByte);
    assertEquals(orig.aChar, copy.aChar);
    assertEquals(orig.aShort, copy.aShort);
    assertEquals(orig.aInt, copy.aInt);
    assertEquals(orig.aLong, copy.aLong);
    assertEquals(orig.aFloat, copy.aFloat, Float.MIN_VALUE * 10.0f);
    assertEquals(orig.aDouble, copy.aDouble, Double.MIN_VALUE * 10.0);
    assertEquals(orig.aString, copy.aString);
    assertFalse(orig.transientValue == copy.transientValue);

    assertNotNull(copy.aMap);
    for (String key : orig.aMap.keySet()) {
        assertEquals(orig.aMap.get(key), copy.aMap.get(key));
    }

    assertNotNull(copy.aMapOfLists);
    for (String key : orig.aMapOfLists.keySet()) {
        assertEquals(orig.aMapOfLists.get(key), copy.aMapOfLists.get(key));
    }

    assertNotNull(copy.xptos);
    assertEquals(copy.xptos.size(), orig.xptos.size());
    for (Xpto x : orig.xptos) {
        boolean found = false;
        for (Xpto y : copy.xptos)
            if (x.getName().equals(y.getName()))
                found = true;
        assertTrue(found);
    }

    assertEquals(orig.toString(), copy.toString());
}
From source file:emlab.role.investment.InvestInPowerGenerationTechnologiesRole.java
public void act(EnergyProducer agent) { long futureTimePoint = getCurrentTick() + agent.getInvestmentFutureTimeHorizon(); // logger.warn(agent + " is looking at timepoint " + futureTimePoint); // ==== Expectations === // Fuel Prices Map<Substance, Double> expectedFuelPrices = new HashMap<Substance, Double>(); for (Substance substance : reps.genericRepository.findAll(Substance.class)) { // use last price expectedFuelPrices.put(substance, findLastKnownPriceForSubstance(substance));// TODO // use // expected // fuel // price }//from ww w . j av a 2 s . co m // CO2 Map<ElectricitySpotMarket, Double> expectedCO2Price = determineExpectedCO2PriceInclTax(futureTimePoint, 3);// TODO // use // expected // co2 // price // Investment decision for (ElectricitySpotMarket market : reps.genericRepository.findAllAtRandom(ElectricitySpotMarket.class)) { MarketInformation marketInformation = new MarketInformation(market, expectedFuelPrices, expectedCO2Price.get(market).doubleValue(), futureTimePoint); /* * if (marketInfoMap.containsKey(market) && marketInfoMap.get(market).time == futureTimePoint) { marketInformation = marketInfoMap.get(market); } else { marketInformation = new * MarketInformation(market, expectedFuelPrices, expectedCO2Price, futureTimePoint); marketInfoMap.put(market, marketInformation); } */ // logger.warn(agent + " is expecting a CO2 price of " + // expectedCO2Price.get(market) + " Euro/MWh at timepoint " // + futureTimePoint + " in Market " + market); double highestValue = Double.MIN_VALUE; PowerGeneratingTechnology bestTechnology = null; for (PowerGeneratingTechnology technology : reps.genericRepository .findAll(PowerGeneratingTechnology.class)) { PowerPlant plant = new PowerPlant(); plant.specifyNotPersist(getCurrentTick(), agent, getNodeForZone(market.getZone()), technology); // if too much capacity of this technology in the pipeline (not // limited to the 5 years) double expectedInstalledCapacityOfTechnology = reps.powerPlantRepository .calculateCapacityOfExpectedOperationalPowerPlantsInMarketAndTechnology(market, technology, futureTimePoint); double expectedOwnedTotalCapacityInMarket = reps.powerPlantRepository .calculateCapacityOfExpectedOperationalPowerPlantsInMarketByOwner(market, futureTimePoint, agent); double expectedOwnedCapacityInMarketOfThisTechnology = reps.powerPlantRepository .calculateCapacityOfExpectedOperationalPowerPlantsInMarketByOwnerAndTechnology(market, technology, futureTimePoint, agent); double capacityOfTechnologyInPipeline = reps.powerPlantRepository .calculateCapacityOfPowerPlantsByTechnologyInPipeline(technology, getCurrentTick()); double operationalCapacityOfTechnology = reps.powerPlantRepository .calculateCapacityOfOperationalPowerPlantsByTechnology(technology, getCurrentTick()); if ((expectedInstalledCapacityOfTechnology + technology.getCapacity()) / (marketInformation.maxExpectedLoad + technology.getCapacity()) > technology .getMaximumInstalledCapacityFractionInCountry()) { // logger.warn(agent + // " will not invest in {} technology because there's too much of this type in the market", // technology); } else if (expectedOwnedCapacityInMarketOfThisTechnology > expectedOwnedTotalCapacityInMarket * technology.getMaximumInstalledCapacityFractionPerAgent()) { // logger.warn(agent + // " will not invest in {} technology because there's too much capacity planned by him", // technology); } else if ((capacityOfTechnologyInPipeline > operationalCapacityOfTechnology) && capacityOfTechnologyInPipeline > 3000) { // TODO: // Dirty // hack, // but // reflects // that 
// you // cannot // expand // a // technology // out // of // zero. // logger.warn(agent + // " will not invest in {} technology because there's too much capacity in the pipeline", // technology); } else if (plant.getActualInvestedCapital() * (1 - agent.getDebtRatioOfInvestments()) > agent.getDownpaymentFractionOfCash() * agent.getCash()) { // logger.warn(agent + // " will not invest in {} technology as he does not have enough money for downpayment", // technology); // TODO: // Modifier // for // investment // costs // is // missing // here } else { Map<Substance, Double> myFuelPrices = new HashMap<Substance, Double>(); for (Substance fuel : technology.getFuels()) { myFuelPrices.put(fuel, expectedFuelPrices.get(fuel)); } Set<SubstanceShareInFuelMix> fuelMix = calculateFuelMix(plant, myFuelPrices, expectedCO2Price.get(market)); plant.setFuelMix(fuelMix); double expectedMarginalCost = determineExpectedMarginalCost(plant, expectedFuelPrices, expectedCO2Price.get(market)); double runningHours = 0d; double expectedGrossProfit = 0d; // logger.warn("Agent {} found that the installed capacity in the market {} in future to be " // + marketInformation.capacitySum + // "and expectde maximum demand to be " + // marketInformation.maxExpectedLoad, // agent, market); long numberOfSegments = reps.segmentRepository.count(); // TODO somehow the prices of long-term contracts could also // be used here to determine the expected profit. Maybe not // though... for (SegmentLoad segmentLoad : market.getLoadDurationCurve()) { double expectedElectricityPrice = marketInformation.expectedElectricityPricesPerSegment .get(segmentLoad.getSegment()); double hours = segmentLoad.getSegment().getLengthInHours(); if (expectedMarginalCost <= expectedElectricityPrice) { runningHours += hours; expectedGrossProfit += (expectedElectricityPrice - expectedMarginalCost) * hours * plant.getAvailableCapacity(futureTimePoint, segmentLoad.getSegment(), numberOfSegments); } } // logger.warn(agent + // "expects technology {} to have {} running", technology, // runningHours); // expect to meet minimum running hours? if (runningHours < plant.getTechnology().getMinimumRunningHours()) { // logger.warn(agent // + " will not invest in {} technology as he expect to have {} running, which is lower then required", // technology, runningHours); } else { double fixedOMCost = calculateFixedOperatingCost(plant);// / // plant.getTechnology().getCapacity(); double operatingProfit = expectedGrossProfit - fixedOMCost; // TODO // should // we // not // exclude // fixed // cost, // or // name // that // NET // profit? // TODO Alter discount rate on the basis of the amount // in long-term contracts? // TODO Alter discount rate on the basis of other stuff, // such as amount of money, market share, portfolio // size. // Calculation of weighted average cost of capital, // based on the companies debt-ratio double wacc = (1 - agent.getDebtRatioOfInvestments()) * agent.getEquityInterestRate() + agent.getDebtRatioOfInvestments() * agent.getLoanInterestRate(); // Creation of out cash-flow during power plant building // phase (note that the cash-flow is negative!) 
TreeMap<Integer, Double> discountedProjectCapitalOutflow = calculateSimplePowerPlantInvestmentCashFlow( technology.getDepreciationTime(), technology.getExpectedLeadtime(), plant.getActualInvestedCapital(), 0); // Creation of in cashflow during operation TreeMap<Integer, Double> discountedProjectCashInflow = calculateSimplePowerPlantInvestmentCashFlow( technology.getDepreciationTime(), technology.getExpectedLeadtime(), 0, operatingProfit); double discountedCapitalCosts = npv(discountedProjectCapitalOutflow, wacc);// are // defined // negative!! // technology.getCapacity(); // logger.warn("Agent {} found that the discounted capital for technology {} to be " // + discountedCapitalCosts, agent, // technology); double discountedOpProfit = npv(discountedProjectCashInflow, wacc); // logger.warn("Agent {} found the expected prices to be {}", // agent, // marketInformation.expectedElectricityPricesPerSegment); // logger.warn("Agent {} found that the projected discounted inflows for technology {} to be " // + discountedOpProfit, // agent, technology); double projectValue = discountedOpProfit + discountedCapitalCosts; // logger.warn( // "Agent {} found the project value for technology {} to be " // + Math.round(projectValue / // plant.getTechnology().getCapacity()) + // " EUR/kW (running hours: " // + runningHours + "", agent, technology); // double projectTotalValue = projectValuePerMW * // plant.getTechnology().getCapacity(); // double projectReturnOnInvestment = discountedOpProfit // / (-discountedCapitalCosts); /* * Divide by capacity, in order not to favour large power plants (which have the single largest NPV */ if (projectValue > 0 && projectValue / plant.getTechnology().getCapacity() > highestValue) { highestValue = projectValue / plant.getTechnology().getCapacity(); bestTechnology = plant.getTechnology(); } } } } if (bestTechnology != null) { // logger.warn("Agent {} invested in technology {} at tick " + getCurrentTick(), agent, bestTechnology); PowerPlant plant = new PowerPlant(); plant.specifyAndPersist(getCurrentTick(), agent, getNodeForZone(market.getZone()), bestTechnology); PowerPlantManufacturer manufacturer = reps.genericRepository .findFirst(PowerPlantManufacturer.class); BigBank bigbank = reps.genericRepository.findFirst(BigBank.class); double investmentCostPayedByEquity = plant.getActualInvestedCapital() * (1 - agent.getDebtRatioOfInvestments()); double investmentCostPayedByDebt = plant.getActualInvestedCapital() * agent.getDebtRatioOfInvestments(); double downPayment = investmentCostPayedByEquity; createSpreadOutDownPayments(agent, manufacturer, downPayment, plant); double amount = determineLoanAnnuities(investmentCostPayedByDebt, plant.getTechnology().getDepreciationTime(), agent.getLoanInterestRate()); // logger.warn("Loan amount is: " + amount); Loan loan = reps.loanRepository.createLoan(agent, bigbank, amount, plant.getTechnology().getDepreciationTime(), getCurrentTick(), plant); // Create the loan plant.createOrUpdateLoan(loan); } else { // logger.warn("{} found no suitable technology anymore to invest in at tick " // + getCurrentTick(), agent); // agent will not participate in the next round of investment if // he does not invest now setNotWillingToInvest(agent); } } }
From source file:gdt.jgui.entity.query.JQueryPanel.java
/**
 * The default constructor.
 */
public JQueryPanel() {
    GridBagLayout gridBagLayout = new GridBagLayout();
    gridBagLayout.columnWidths = new int[] { 100, 0, 0 };
    gridBagLayout.rowHeights = new int[] { 0, 0, 0, 0 };
    gridBagLayout.columnWeights = new double[] { 0.0, 1.0, Double.MIN_VALUE };
    gridBagLayout.rowWeights = new double[] { 0.0, 0.0, 0.0, 0.0 };
    setLayout(gridBagLayout);

    JLabel lblElement = new JLabel("Element");
    GridBagConstraints gbc_lblElement = new GridBagConstraints();
    gbc_lblElement.insets = new Insets(5, 5, 5, 5);
    gbc_lblElement.gridx = 0;
    gbc_lblElement.gridy = 0;
    gbc_lblElement.anchor = GridBagConstraints.FIRST_LINE_START;
    add(lblElement, gbc_lblElement);

    elementComboBox = new JComboBox<String>();
    GridBagConstraints gbc_elementComboBox = new GridBagConstraints();
    gbc_elementComboBox.fill = GridBagConstraints.HORIZONTAL;
    gbc_elementComboBox.insets = new Insets(0, 0, 5, 0);
    gbc_elementComboBox.gridx = 1;
    gbc_elementComboBox.gridy = 0;
    gbc_elementComboBox.anchor = GridBagConstraints.FIRST_LINE_START;
    add(elementComboBox, gbc_elementComboBox);
    elementComboBox.addItemListener(new ItemListener() {
        @Override
        public void itemStateChanged(ItemEvent e) {
            try {
                initItemNameSelector();
            } catch (Exception ee) {
                LOGGER.severe(ee.toString());
            }
        }
    });

    JLabel lblItemNameField = new JLabel("Item field");
    GridBagConstraints gbc_lblItemField = new GridBagConstraints();
    gbc_lblItemField.insets = new Insets(5, 5, 5, 5);
    gbc_lblItemField.gridx = 0;
    gbc_lblItemField.gridy = 1;
    gbc_lblItemField.anchor = GridBagConstraints.FIRST_LINE_START;
    add(lblItemNameField, gbc_lblItemField);

    itemNameFieldComboBox = new JComboBox<String>();
    GridBagConstraints gbc_itemNameFieldComboBox = new GridBagConstraints();
    gbc_itemNameFieldComboBox.insets = new Insets(0, 0, 5, 0);
    gbc_itemNameFieldComboBox.fill = GridBagConstraints.HORIZONTAL;
    gbc_itemNameFieldComboBox.gridx = 1;
    gbc_itemNameFieldComboBox.gridy = 1;
    gbc_itemNameFieldComboBox.anchor = GridBagConstraints.FIRST_LINE_START;
    add(itemNameFieldComboBox, gbc_itemNameFieldComboBox);
    itemNameFieldComboBox.addItemListener(new ItemListener() {
        @Override
        public void itemStateChanged(ItemEvent e) {
            try {
                initItemNameSelector();
                initItemValueSelector();
            } catch (Exception ee) {
                LOGGER.severe(ee.toString());
            }
        }
    });

    JLabel itemTitle = new JLabel("Item title");
    GridBagConstraints gbc_lblItemtitle = new GridBagConstraints();
    gbc_lblItemtitle.insets = new Insets(5, 5, 5, 5);
    gbc_lblItemtitle.gridx = 0;
    gbc_lblItemtitle.gridy = 2;
    gbc_lblItemtitle.anchor = GridBagConstraints.FIRST_LINE_START;
    add(itemTitle, gbc_lblItemtitle);

    itemNameComboBox = new JComboBox<String>();
    GridBagConstraints gbc_itemComboBox = new GridBagConstraints();
    gbc_itemComboBox.insets = new Insets(0, 0, 5, 0);
    gbc_itemComboBox.fill = GridBagConstraints.HORIZONTAL;
    gbc_itemComboBox.gridx = 1;
    gbc_itemComboBox.gridy = 2;
    gbc_itemComboBox.anchor = GridBagConstraints.FIRST_LINE_START;
    add(itemNameComboBox, gbc_itemComboBox);
    itemNameComboBox.addItemListener(new ItemListener() {
        @Override
        public void itemStateChanged(ItemEvent e) {
            try {
                initItemValueSelector();
            } catch (Exception ee) {
                LOGGER.severe(ee.toString());
            }
        }
    });

    JLabel itemValue = new JLabel("Item value");
    GridBagConstraints gbc_lblItemValue = new GridBagConstraints();
    gbc_lblItemValue.insets = new Insets(5, 5, 5, 5);
    gbc_lblItemValue.weighty = 0.0;
    gbc_lblItemValue.gridx = 0;
    gbc_lblItemValue.gridy = 3;
    gbc_lblItemValue.anchor = GridBagConstraints.FIRST_LINE_START;
    add(itemValue, gbc_lblItemValue);

    itemValueComboBox = new JComboBox<String>();
    GridBagConstraints gbc_itemValueComboBox = new GridBagConstraints();
    gbc_itemValueComboBox.insets = new Insets(0, 0, 5, 0);
    gbc_itemValueComboBox.fill = GridBagConstraints.HORIZONTAL;
    gbc_itemValueComboBox.gridx = 1;
    gbc_itemValueComboBox.gridy = 3;
    gbc_itemValueComboBox.anchor = GridBagConstraints.FIRST_LINE_START;
    add(itemValueComboBox, gbc_itemValueComboBox);

    table = new JTable();
    JScrollPane scrollPane = new JScrollPane(table, JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED,
            JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED);
    table.setAutoResizeMode(JTable.AUTO_RESIZE_OFF);
    table.addMouseListener(new java.awt.event.MouseAdapter() {
        @Override
        public void mouseClicked(java.awt.event.MouseEvent evt) {
            int row = table.rowAtPoint(evt.getPoint());
            int col = table.columnAtPoint(evt.getPoint());
            System.out.println("JQueryPanel:cell click:row=" + row + " column=" + col);
            if (col == 1) {
                String label$ = (String) table.getValueAt(row, 1);
                System.out.println("JQueryPanel:cell click:label=" + label$);
                Entigrator entigrator = console.getEntigrator(entihome$);
                String entity$ = entigrator.indx_keyAtLabel(label$);
                JEntityFacetPanel efp = new JEntityFacetPanel();
                String efpLocator$ = efp.getLocator();
                efpLocator$ = Locator.append(efpLocator$, Entigrator.ENTIHOME, entihome$);
                efpLocator$ = Locator.append(efpLocator$, EntityHandler.ENTITY_KEY, entity$);
                JConsoleHandler.execute(console, efpLocator$);
            }
        }
    });

    GridBagConstraints gbc_scroll_panel = new GridBagConstraints();
    gbc_scroll_panel.anchor = GridBagConstraints.NORTH;
    gbc_scroll_panel.gridwidth = 2;
    gbc_scroll_panel.weighty = 1.0;
    gbc_scroll_panel.fill = GridBagConstraints.HORIZONTAL;
    gbc_scroll_panel.gridx = 0;
    gbc_scroll_panel.gridy = 4;
    add(scrollPane, gbc_scroll_panel);
    scrollPane.setMinimumSize(scrollPane.getPreferredSize());
}