List of usage examples for java.lang Float toString
public String toString()
From source file:org.openmeetings.app.data.flvrecord.converter.FlvInterviewConverter.java
/**
 * Post-processes an interview recording: merges the per-stream audio into one
 * full-length WAV, renders each video pod to a PNG image sequence via ffmpeg,
 * composites the two pods side by side frame-by-frame (ImageMagick "+append"),
 * re-encodes the composite sequence plus audio into the final FLV, and writes
 * a JPEG preview and an AVI alternate download. All external-tool invocations
 * are collected into {@code returnLog} and persisted at the end.
 *
 * @param flvRecording the interview recording to convert (progress and result
 *                     metadata are written back to it)
 * @param reconversion when true, waves are merged with per-stream metadata
 *                     (see {@code mergeAudioToWaves} overloads)
 */
public void stripAudioFromFLVs(FlvRecording flvRecording, boolean reconversion) {
    List<HashMap<String, String>> returnLog = new LinkedList<HashMap<String, String>>();
    List<String> listOfFullWaveFiles = new LinkedList<String>();
    String streamFolderName = getStreamFolderName(flvRecording);

    List<FlvRecordingMetaData> metaDataList = flvRecordingMetaDataDaoImpl
            .getFlvRecordingMetaDataAudioFlvsByRecording(flvRecording.getFlvRecordingId());

    // First pass extracts one WAV per audio stream into listOfFullWaveFiles.
    stripAudioFirstPass(flvRecording, returnLog, listOfFullWaveFiles, streamFolderName, metaDataList);
    try {
        // Merge Wave to Full Length
        String streamFolderGeneralName = getStreamFolderName();

        String hashFileFullName = "INTERVIEW_" + flvRecording.getFlvRecordingId() + "_FINAL_WAVE.wav";
        String outputFullWav = streamFolderName + hashFileFullName;
        deleteFileIfExists(outputFullWav);

        if (listOfFullWaveFiles.size() == 1) {
            // Single stream: use it directly, no merge needed.
            outputFullWav = listOfFullWaveFiles.get(0);
        } else if (listOfFullWaveFiles.size() > 0) {
            String[] argv_full_sox;
            if (reconversion) {
                argv_full_sox = mergeAudioToWaves(listOfFullWaveFiles, outputFullWav, metaDataList);
            } else {
                argv_full_sox = mergeAudioToWaves(listOfFullWaveFiles, outputFullWav);
            }

            log.debug("START mergeAudioToWaves ################# ");
            // NOTE(review): arrays don't override toString(); this logs an
            // object identity like "[Ljava.lang.String;@1a2b3c", not the args.
            log.debug(argv_full_sox.toString());
            String iString = "";
            for (int i = 0; i < argv_full_sox.length; i++) {
                iString += argv_full_sox[i] + " ";
            }
            log.debug(iString);
            log.debug("END mergeAudioToWaves ################# ");

            returnLog.add(ProcessHelper.executeScript("mergeWave", argv_full_sox));
        } else {
            // create default Audio to merge it.
            // strip to content length
            String outputWav = streamFolderGeneralName + "one_second.wav";

            // Calculate delta at beginning
            Long deltaTimeMilliSeconds = flvRecording.getRecordEnd().getTime()
                    - flvRecording.getRecordStart().getTime();
            // Pad the one-second sample up to (duration - 1) seconds of silence.
            Float deltaPadding = (Float.parseFloat(deltaTimeMilliSeconds.toString()) / 1000) - 1;

            String[] argv_full_sox = new String[] { this.getPathToSoX(), outputWav, outputFullWav, "pad", "0",
                    deltaPadding.toString() };

            log.debug("START generateSampleAudio ################# ");
            String tString = "";
            for (int i = 0; i < argv_full_sox.length; i++) {
                tString += argv_full_sox[i] + " ";
            }
            log.debug(tString);
            log.debug("END generateSampleAudio ################# ");

            returnLog.add(ProcessHelper.executeScript("mergeWave", argv_full_sox));
        }

        // Merge Audio with Video / Calculate resulting FLV
        // Start extracting image sequence
        int frameRate = 25;

        for (FlvRecordingMetaData flvRecordingMetaData : metaDataList) {
            // FLV to 24 FPS Sequence AVI
            String inputFlv = streamFolderName + flvRecordingMetaData.getStreamName() + ".flv";

            File inputFlvFile = new File(inputFlv);
            if (inputFlvFile.exists()) {
                // TO Image Sequence
                String outputMetaImageData = streamFolderName
                        + flvRecordingMetaData.getFlvRecordingMetaDataId() + File.separatorChar;

                // Image Folder
                File imageSequenceFolder = new File(outputMetaImageData);
                imageSequenceFolder.mkdir();

                String outputImages = outputMetaImageData + "image%d.png";

                String[] argv_imageSeq = new String[] { this.getPathToFFMPEG(), "-i", inputFlv, "-r",
                        "" + frameRate, "-s", "320x240", outputImages };

                log.debug("START generateImageSequence ################# ");
                String iString = "";
                for (int i = 0; i < argv_imageSeq.length; i++) {
                    iString += argv_imageSeq[i] + " ";
                }
                log.debug(iString);
                log.debug("END generateImageSequence ################# ");

                returnLog.add(ProcessHelper.executeScript("generateImageSequence", argv_imageSeq));
            }
        }

        // Default Image for empty interview video pods
        String defaultInterviewImage = streamFolderGeneralName + "default_interview_image.png";

        File defaultInterviewImageFile = new File(defaultInterviewImage);
        if (!defaultInterviewImageFile.exists()) {
            throw new Exception("defaultInterviewImageFile does not exist!");
        }

        // Create Folder for the output Image Sequence
        String outputImageMergedData = streamFolderName + "INTERVIEW_" + flvRecording.getFlvRecordingId()
                + File.separatorChar;

        // Merged Image Folder
        File outputImageMergedDateFolder = new File(outputImageMergedData);
        outputImageMergedDateFolder.mkdir();

        // Generate the Single Image by sequencing
        boolean jobRunning = true;
        long currentTimeInMilliSeconds = 0;
        // NOTE(review): despite the name, this holds MILLIseconds (difference of
        // Date.getTime() values); it is consistently compared against
        // currentTimeInMilliSeconds below, so the logic is correct.
        long completeLengthInSeconds = flvRecording.getRecordEnd().getTime()
                - flvRecording.getRecordStart().getTime();

        log.debug("completeLengthInSeconds :: " + completeLengthInSeconds);

        int sequenceCounter = 0;

        // One loop iteration composites one second (frameRate frames) of output.
        while (jobRunning) {
            // Process one Second of Movie
            String[] interviewPod1Images = new String[frameRate];
            String[] interviewPod2Images = new String[frameRate];
            int[] outputFrameNumbers = new int[frameRate];

            for (FlvRecordingMetaData flvRecordingMetaData : metaDataList) {
                long deltaStartRecording = flvRecordingMetaData.getRecordStart().getTime()
                        - flvRecording.getRecordStart().getTime();

                // Only pods whose recording covers the current wall-clock second
                // contribute frames.
                if (flvRecording.getRecordStart().getTime() + currentTimeInMilliSeconds >= flvRecordingMetaData
                        .getRecordStart().getTime()
                        && flvRecording.getRecordStart().getTime()
                                + currentTimeInMilliSeconds <= flvRecordingMetaData.getRecordEnd().getTime()) {

                    // Calculate which images should be in here
                    // Calculate the relative starting point
                    long thisImageSequenceStartingPoint = currentTimeInMilliSeconds - deltaStartRecording;

                    // Calculate the first and following frameRate FPS Number
                    int secondToStart = Long.valueOf(thisImageSequenceStartingPoint / 1000).intValue();
                    int firstFrame = secondToStart * frameRate;

                    for (int i = 0; i < frameRate; i++) {
                        int currentImageNumber = firstFrame + i;
                        // Remove the first half second and fill it up with black screens
                        currentImageNumber -= (frameRate / 2);

                        // Remove the first period of Images, this is where the user has started
                        // to share his Video but does not have agreed in the Flash Security
                        // Warning Dialogue
                        Integer initialGapSeconds = flvRecordingMetaData.getInitialGapSeconds();
                        if (initialGapSeconds != null) {
                            // NOTE(review): (initialGapSeconds / 1000) is INTEGER division;
                            // if the value really is in seconds this evaluates to 0 for any
                            // gap under 1000s — looks like it expects milliseconds; verify.
                            int initialMissingImages = Double
                                    .valueOf(Math.floor((initialGapSeconds / 1000) * frameRate)).intValue();
                            currentImageNumber -= initialMissingImages;
                        }

                        String imageName = "image" + currentImageNumber + ".png";
                        String outputMetaImageFullData = streamFolderName
                                + flvRecordingMetaData.getFlvRecordingMetaDataId() + File.separatorChar
                                + imageName;

                        // Missing frame -> substitute the default placeholder image.
                        File outputMetaImageFullDataFile = new File(outputMetaImageFullData);
                        if (!outputMetaImageFullDataFile.exists()) {
                            outputMetaImageFullData = defaultInterviewImage;
                        }

                        if (flvRecordingMetaData.getInteriewPodId() == 1) {
                            interviewPod1Images[i] = outputMetaImageFullData;
                        } else if (flvRecordingMetaData.getInteriewPodId() == 2) {
                            interviewPod2Images[i] = outputMetaImageFullData;
                        }
                    }
                }
            }

            // Update Sequence Count
            for (int i = 0; i < frameRate; i++) {
                outputFrameNumbers[i] = sequenceCounter;
                sequenceCounter++;
            }

            // Now we should have found the needed Images to calculate, in
            // case not we add an empty black screen
            for (int i = 0; i < frameRate; i++) {
                String addZeros = "";
                String outputImageName = outputImageMergedData + "image" + addZeros + outputFrameNumbers[i]
                        + ".png";

                if (interviewPod1Images[i] == null) {
                    interviewPod1Images[i] = defaultInterviewImage;
                }
                if (interviewPod2Images[i] == null) {
                    interviewPod2Images[i] = defaultInterviewImage;
                }

                if (System.getProperty("os.name").toUpperCase().indexOf("WINDOWS") == -1) {
                    // Non-Windows: ImageMagick "+append" pastes the two pods side by side.
                    String[] argv_imageMagick = new String[] { this.getPathToImageMagick(), "+append",
                            interviewPod1Images[i], interviewPod2Images[i], outputImageName };
                    returnLog.add(ProcessHelper.executeScript("generateImageSequence", argv_imageMagick));
                } else {
                    returnLog.add(processImageWindows(interviewPod1Images[i], interviewPod2Images[i],
                            outputImageName));
                }
            }

            currentTimeInMilliSeconds += 1000;

            double cLength = 100 * ((double) currentTimeInMilliSeconds) / completeLengthInSeconds;
            int progress = Double.valueOf(cLength).intValue();

            log.debug("completeLengthInSeconds|currentTimeInMilliSeconds " + completeLengthInSeconds + "|"
                    + currentTimeInMilliSeconds + "|" + progress + "|" + cLength);

            flvRecordingDaoImpl.updateFlvRecordingProgress(flvRecording.getFlvRecordingId(), progress);

            if (currentTimeInMilliSeconds >= completeLengthInSeconds) {
                jobRunning = false;
            }
        }

        // Generate Movie by sequence of Images
        String imagescomplete = outputImageMergedData + "image%d.png";
        String[] argv_generatedMoview = null;

        String inputScreenFullFlv = streamFolderName + "COMPLETE_INTERVIEW_" + flvRecording.getFlvRecordingId()
                + ".flv";
        deleteFileIfExists(inputScreenFullFlv);

        argv_generatedMoview = new String[] { this.getPathToFFMPEG(), "-i", imagescomplete, "-r",
                "" + frameRate, "-vcodec", "flv", "-qmax", "1", "-qmin", "1", inputScreenFullFlv };

        log.debug("START generateFullBySequenceFLV ################# ");
        String tString2 = "";
        for (int i = 0; i < argv_generatedMoview.length; i++) {
            tString2 += argv_generatedMoview[i] + " ";
        }
        log.debug(tString2);
        log.debug("END generateFullBySequenceFLV ################# ");

        returnLog.add(ProcessHelper.executeScript("generateFullBySequenceFLV", argv_generatedMoview));

        String hashFileFullNameFlv = "flvRecording_" + flvRecording.getFlvRecordingId() + ".flv";
        String outputFullFlv = streamFolderGeneralName + hashFileFullNameFlv;
        deleteFileIfExists(outputFullFlv);

        // ffmpeg -vcodec flv -qscale 9.5 -r 25 -ar 22050 -ab 32k -s 320x240
        // -i 65318fb5c54b1bc1b1bca077b493a914_28_12_2009_23_38_17_FINAL_WAVE.wav
        // -i 65318fb5c54b1bc1b1bca077b493a914_28_12_2009_23_38_17.flv
        // final1.flv
        int flvWidth = 640;
        int flvHeight = 240;

        flvRecording.setFlvWidth(flvWidth);
        flvRecording.setFlvHeight(flvHeight);

        String[] argv_fullFLV = new String[] { this.getPathToFFMPEG(),
                "-i", inputScreenFullFlv,
                "-i", outputFullWav,
                "-ar", "22050",
                "-ab", "32k",
                "-s", flvWidth + "x" + flvHeight,
                "-vcodec", "flv",
                "-r", "" + frameRate, "-qmax", "1", "-qmin", "1", outputFullFlv };

        log.debug("START generateFullFLV ################# ");
        String tString = "";
        for (int i = 0; i < argv_fullFLV.length; i++) {
            tString += argv_fullFLV[i] + " ";
            // log.debug(" i " + i + " argv-i " + argv_fullFLV[i]);
        }
        log.debug(tString);
        log.debug("END generateFullFLV ################# ");

        returnLog.add(ProcessHelper.executeScript("generateFullFLV", argv_fullFLV));

        flvRecording.setFileHash(hashFileFullNameFlv);

        // Extract first Image for preview purpose
        // ffmpeg -i movie.flv -vcodec mjpeg -vframes 1 -an -f rawvideo -s
        // 320x240 movie.jpg
        String hashFileFullNameJPEG = "flvRecording_" + flvRecording.getFlvRecordingId() + ".jpg";
        String outPutJpeg = streamFolderGeneralName + hashFileFullNameJPEG;
        deleteFileIfExists(outPutJpeg);

        flvRecording.setPreviewImage(hashFileFullNameJPEG);

        // NOTE(review): "-vframes" is "100" here although the comment above
        // documents extracting a single frame — verify which is intended.
        String[] argv_previewFLV = new String[] {
                this.getPathToFFMPEG(),
                "-i", outputFullFlv,
                "-vcodec", "mjpeg",
                "-vframes", "100", "-an",
                "-f", "rawvideo",
                "-s", flvWidth + "x" + flvHeight,
                outPutJpeg };

        log.debug("START previewFullFLV ################# ");
        // NOTE(review): array toString() logs object identity, not contents.
        log.debug(argv_previewFLV.toString());
        String kString = "";
        for (int i = 0; i < argv_previewFLV.length; i++) {
            kString += argv_previewFLV[i] + " ";
        }
        log.debug(kString);
        log.debug("END previewFullFLV ################# ");

        returnLog.add(ProcessHelper.executeScript("generateFullFLV", argv_previewFLV));

        String alternateDownloadName = "flvRecording_" + flvRecording.getFlvRecordingId() + ".avi";
        String alternateDownloadFullName = streamFolderGeneralName + alternateDownloadName;
        deleteFileIfExists(alternateDownloadFullName);

        String[] argv_alternateDownload = new String[] { this.getPathToFFMPEG(), "-i", outputFullFlv,
                alternateDownloadFullName };

        log.debug("START alternateDownLoad ################# ");
        // NOTE(review): logs the PREVIEW argv here, not argv_alternateDownload
        // (the real args are logged via sString below) — likely copy/paste.
        log.debug(argv_previewFLV.toString());
        String sString = "";
        for (int i = 0; i < argv_alternateDownload.length; i++) {
            sString += argv_alternateDownload[i] + " ";
        }
        log.debug(sString);
        log.debug("END alternateDownLoad ################# ");

        returnLog.add(ProcessHelper.executeScript("alternateDownload", argv_alternateDownload));

        flvRecording.setAlternateDownload(alternateDownloadName);

        flvRecordingDaoImpl.updateFlvRecording(flvRecording);

        // Replace the recording's conversion log with this run's entries.
        flvRecordingLogDaoImpl.deleteFLVRecordingLogByRecordingId(flvRecording.getFlvRecordingId());
        for (HashMap<String, String> returnMap : returnLog) {
            flvRecordingLogDaoImpl.addFLVRecordingLog("generateFFMPEG", flvRecording, returnMap);
        }

        // Delete Wave Files
        for (String fileName : listOfFullWaveFiles) {
            File audio = new File(fileName);
            if (audio.exists()) {
                audio.delete();
            }
        }

        // Delete all Image temp dirs
        for (FlvRecordingMetaData flvRecordingMetaData : metaDataList) {
            String outputMetaImageFullData = streamFolderName
                    + flvRecordingMetaData.getFlvRecordingMetaDataId() + File.separatorChar;
            this.deleteDirectory(new File(outputMetaImageFullData));
        }
        this.deleteDirectory(new File(outputImageMergedData));
    } catch (Exception err) {
        log.error("[stripAudioFromFLVs]", err);
    }
}
From source file:org.oryxeditor.server.BPMN2_0WDToYAWL.java
public String getYAWLSpecification(String js, String rsm) { String spec = ""; String end = ""; HashSet<String> translatedElements = new HashSet<String>(); try {/* ww w .ja v a2 s. c o m*/ this.oryxStencil = new JSONObject(js); this.rsmStencil = new JSONObject(rsm); Float version = 0.1F; if (this.oryxStencil.getJSONObject("properties").getString("version").length() > 0) { version = new Float(this.oryxStencil.getJSONObject("properties").getString("version")); } spec += "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"; spec += "<specificationSet xmlns=\"http://www.yawlfoundation.org/yawlschema\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" version=\"2.1\" xsi:schemaLocation=\"http://www.yawlfoundation.org/yawlschema http://www.yawlfoundation.org/yawlschema/YAWL_Schema2.1.xsd\">\n"; spec += "<specification uri=\"example\">\n"; spec += "\t\t<metaData>\n"; spec += "\t\t\t<creator>" + this.oryxStencil.getJSONObject("properties").getString("author") + "</creator>\n"; spec += "\t\t\t<description>" + this.oryxStencil.getJSONObject("properties").getString("documentation") + "</description>\n"; spec += "\t\t\t<version>" + version.toString() + "</version>\n"; spec += "\t\t\t<persistent>false</persistent>\n"; spec += "\t\t\t<identifier>" + this.oryxStencil.getString("resourceId") + "</identifier>\n"; spec += "\t\t</metaData>\n"; spec += "<xs:schema xmlns:xs=\"http://www.w3.org/2001/XMLSchema\" />\n"; spec += "<decomposition id=\"New_Net_1\" isRootNet=\"true\" xsi:type=\"NetFactsType\">\n"; spec += "\t\t\t<processControlElements>\n"; JSONArray flowElements = new JSONArray(); flowElements.put(this.getResourcesByStencilId(this.oryxStencil, "StartNoneEvent").getJSONObject(0)); while (flowElements.length() > 0) { JSONArray nextFlowElements = new JSONArray(); for (int i = 0; i < flowElements.length(); i++) { JSONObject controlElement = flowElements.getJSONObject(i); if (!translatedElements.contains(controlElement.getString("resourceId"))) { if 
(controlElement.getJSONObject("stencil").getString("id").equals("StartNoneEvent")) { spec += this.getStartNoneEventToInputCondition(controlElement); } else if (controlElement.getJSONObject("stencil").getString("id").equals("Task")) { spec += this.getTaskToTask(controlElement); } else if (controlElement.getJSONObject("stencil").getString("id").equals("EndNoneEvent")) { end += this.getEndNoneEventToOutputCondition(controlElement); } JSONArray nextToThisElement = this.getNextControlElements(controlElement); for (int j = 0; j < nextToThisElement.length(); j++) { nextFlowElements.put(nextToThisElement.getJSONObject(j)); } translatedElements.add(controlElement.getString("resourceId")); } } flowElements = nextFlowElements; } spec += end; spec += "\t\t\t</processControlElements>\n"; spec += "\t\t</decomposition>\n"; spec += this.taskDecompositions; spec += "\t</specification>\n"; spec += "</specificationSet>\n"; } catch (JSONException e) { // TODO Auto-generated catch block e.printStackTrace(); } return spec; }
From source file:gov.nih.nci.caintegrator.data.CaIntegrator2DaoImpl.java
/** * {@inheritDoc}//from ww w. j a v a 2 s . c o m */ @Override @SuppressWarnings(UNCHECKED) // Hibernate operations are untyped public AnnotationDefinition getAnnotationDefinition(Long cdeId, Float version) { List<AnnotationDefinition> values = (version == null) ? getCurrentSession().createCriteria(AnnotationDefinition.class) .add(Restrictions.eq("commonDataElement.publicID", cdeId)).list() : getCurrentSession().createCriteria(AnnotationDefinition.class) .add(Restrictions.eq("commonDataElement.publicID", cdeId)) .add(Restrictions.eq("commonDataElement.version", version.toString())).list(); if (values.isEmpty()) { return null; } else { return latestVersion(values); } }
From source file:io.swagger.client.api.DefaultApi.java
/** * One-shot movie scene// ww w. ja v a 2 s.c o m * Retrieves the current list of cards related to the given movie scene * @param authorization Authorization token ('Bearer <token>') * @param clientMovieId Client movie ID being played * @param timestamp Current movie timestamp in seconds * @param acceptLanguage Client locale, as <language>-<country> * @param contentType application/json * @param relations Indicates if the received scene cards should contain the scene hierarchy (true) or be a flat list of the main scene items (false) * @param imageSize Size of the images returned in the response * @return List<Card> */ public List<Card> getStaticMovieScene(String authorization, String clientMovieId, Float timestamp, String acceptLanguage, String contentType, Boolean relations, String imageSize) throws ApiException { Object localVarPostBody = null; // verify the required parameter 'authorization' is set if (authorization == null) { throw new ApiException(400, "Missing the required parameter 'authorization' when calling getStaticMovieScene"); } // verify the required parameter 'clientMovieId' is set if (clientMovieId == null) { throw new ApiException(400, "Missing the required parameter 'clientMovieId' when calling getStaticMovieScene"); } // verify the required parameter 'timestamp' is set if (timestamp == null) { throw new ApiException(400, "Missing the required parameter 'timestamp' when calling getStaticMovieScene"); } // create path and map variables String localVarPath = "/movies/{client_movie_id}/static/{timestamp}".replaceAll("\\{format\\}", "json") .replaceAll("\\{" + "client_movie_id" + "\\}", apiInvoker.escapeString(clientMovieId.toString())) .replaceAll("\\{" + "timestamp" + "\\}", apiInvoker.escapeString(timestamp.toString())); // query params List<Pair> localVarQueryParams = new ArrayList<Pair>(); // header params Map<String, String> localVarHeaderParams = new HashMap<String, String>(); // form params Map<String, String> localVarFormParams = new 
HashMap<String, String>(); localVarQueryParams.addAll(ApiInvoker.parameterToPairs("", "relations", relations)); localVarQueryParams.addAll(ApiInvoker.parameterToPairs("", "image_size", imageSize)); localVarHeaderParams.put("Authorization", ApiInvoker.parameterToString(authorization)); localVarHeaderParams.put("Accept-Language", ApiInvoker.parameterToString(acceptLanguage)); localVarHeaderParams.put("Content-Type", ApiInvoker.parameterToString(contentType)); String[] localVarContentTypes = { "application/json" }; String localVarContentType = localVarContentTypes.length > 0 ? localVarContentTypes[0] : "application/json"; if (localVarContentType.startsWith("multipart/form-data")) { // file uploading MultipartEntityBuilder localVarBuilder = MultipartEntityBuilder.create(); localVarPostBody = localVarBuilder.build(); } else { // normal form params } try { String localVarResponse = apiInvoker.invokeAPI(basePath, localVarPath, "GET", localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarContentType); if (localVarResponse != null) { return (List<Card>) ApiInvoker.deserialize(localVarResponse, "array", Card.class); } else { return null; } } catch (ApiException ex) { throw ex; } }
From source file:com.google.appinventor.components.runtime.GoogleMap.java
@SimpleFunction(description = "Set the property of a marker, note that the marker has to be added first or else will " + "throw an exception! Properties include: \"color\"(hue value ranging from 0~360), \"title\", " + "\"snippet\", \"draggable\"(give either true or false as the value).") public void UpdateMarker(int markerId, String propertyName, Object value) { // we don't support update lat, lng here, one can remove the marker and add // a new one/*from w ww. j a va 2 s .com*/ String property = propertyName.trim(); String propVal = value.toString().trim(); //convert everything to String first Log.i(TAG, "@UpdateMarker"); Log.i(TAG, "markerId:" + markerId); Log.i(TAG, "prop:" + propertyName); Log.i(TAG, "value:" + value); Marker marker = getMarkerIfExisted(markerId); Log.i(TAG, "marker?:" + marker); if (marker != null) { if (property.equals("color")) { Log.i(TAG, "we are changing color"); Float hue = new Float(propVal); if (hue < 0 || hue > 360) { form.dispatchErrorOccurredEvent(this, "UpdateMarker", ErrorMessages.ERROR_GOOGLE_MAP_INVALID_INPUT, hue.toString()); } else { marker.setIcon(BitmapDescriptorFactory.defaultMarker(new Float(propVal))); } } if (property.equals("title")) { Log.i(TAG, "we are changing title"); marker.setTitle(propVal); } if (property.equals("snippet")) { Log.i(TAG, "we are changing snippet"); marker.setSnippet(propVal); } if (property.equals("draggable")) { Log.i(TAG, "we are changing draggable"); marker.setDraggable(new Boolean(propVal)); } } }
From source file:org.jboss.bqt.client.xml.XMLQueryVisitationStrategy.java
/** * Produce an XML message for an instance of the Float. * <br>//from w ww. j ava 2 s . co m * @param object the instance for which the message is to be produced. * @param parent the XML element that is to be the parent of the produced XML message. * @return the root element of the XML segment that was produced. * @exception JDOMException if there is an error producing the message. */ private Element produceMsg(Float object, Element parent) throws JDOMException { // ---------------------- // Create the Float element ... // ---------------------- Element floatElement = new Element(TagNames.Elements.FLOAT); floatElement.setText(object.toString()); if (parent != null) { floatElement = parent.addContent(floatElement); } return floatElement; }
From source file:org.fao.geonet.kernel.search.LuceneSearcher.java
/**
 * Builds the search "response" element for the requested result page, fetching
 * each hit's metadata from the Lucene index (fast/index modes) or from the
 * data manager (normal mode), optionally prepending the summary element and
 * attaching the document score when score tracking is enabled.
 *
 * @param srvContext service context; may be null (then no GeonetContext and no
 *                   per-record dynamic info is available)
 * @param request    search request (fast flag, summary flag, paging range)
 * @param config     service configuration
 * @return An empty response if no result or a list of results. Return only
 *         geonet:info element in fast mode.
 * @throws Exception when fewer hits are available than the requested page needs
 */
public Element present(ServiceContext srvContext, Element request, ServiceConfig config) throws Exception {
    updateSearchRange(request);

    GeonetContext gc = null;
    if (srvContext != null)
        gc = (GeonetContext) srvContext.getHandlerContext(Geonet.CONTEXT_NAME);

    String sFast = request.getChildText(Geonet.SearchResult.FAST);
    boolean fast = sFast != null && sFast.equals("true");
    boolean inFastMode = fast || "index".equals(sFast);

    // build response
    Element response = new Element("response");
    response.setAttribute("from", getFrom() + "");
    response.setAttribute("to", getTo() + "");

    if (Log.isDebugEnabled(Geonet.SEARCH_ENGINE))
        Log.debug(Geonet.SEARCH_ENGINE, Xml.getString(response));

    // Add summary if required and exists
    String sBuildSummary = request.getChildText(Geonet.SearchResult.BUILD_SUMMARY);
    boolean buildSummary = sBuildSummary == null || sBuildSummary.equals("true");

    if (buildSummary && _elSummary != null)
        response.addContent((Element) _elSummary.clone());

    if (getTo() > 0) {
        TopDocs tdocs = performQuery(getFrom() - 1, getTo(), false); // get enough hits to show a page

        int nrHits = getTo() - (getFrom() - 1);
        if (tdocs.scoreDocs.length >= nrHits) {
            for (int i = 0; i < nrHits; i++) {
                Document doc;
                // Reader is acquired per hit and released in the finally below.
                IndexAndTaxonomy indexAndTaxonomy = _sm.getIndexReader(_language, _versionToken);
                _versionToken = indexAndTaxonomy.version;
                try {
                    if (inFastMode) {
                        // no selector — load the whole stored document
                        doc = indexAndTaxonomy.indexReader.document(tdocs.scoreDocs[i].doc);
                    } else {
                        // only the "_id" stored field is needed outside fast mode
                        DocumentStoredFieldVisitor docVisitor = new DocumentStoredFieldVisitor("_id");
                        indexAndTaxonomy.indexReader.document(tdocs.scoreDocs[i].doc, docVisitor);
                        doc = docVisitor.getDocument();
                    }
                } finally {
                    _sm.releaseIndexReader(indexAndTaxonomy);
                }
                String id = doc.get("_id");
                Element md = null;

                if (fast) {
                    md = LuceneSearcher.getMetadataFromIndex(doc, id, false, null, null, null);
                } else if ("index".equals(sFast)) {
                    // Retrieve information from the index for the record
                    md = LuceneSearcher.getMetadataFromIndex(doc, id, true,
                            _language == null ? srvContext.getLanguage() : _language,
                            _luceneConfig.getMultilingualSortFields(), _luceneConfig.getDumpFields());

                    // Retrieve dynamic properties according to context (eg. editable)
                    // NOTE(review): gc (and srvContext in the ternary above) are only
                    // non-null when srvContext != null — this branch looks like it can
                    // NPE when called without a service context; verify callers.
                    gc.getDataManager().buildExtraMetadataInfo(srvContext, id,
                            md.getChild(Edit.RootChild.INFO, Edit.NAMESPACE));
                } else if (srvContext != null) {
                    boolean forEditing = false, withValidationErrors = false, keepXlinkAttributes = false;
                    md = gc.getDataManager().getMetadata(srvContext, id, forEditing, withValidationErrors,
                            keepXlinkAttributes);
                }

                //--- a metadata could have been deleted just before showing
                //--- search results
                if (md != null) {
                    // Calculate score and add it to info elem
                    if (_luceneConfig.isTrackDocScores()) {
                        Float score = tdocs.scoreDocs[i].score;
                        Element info = md.getChild(Edit.RootChild.INFO, Edit.NAMESPACE);
                        addElement(info, Edit.Info.Elem.SCORE, score.toString());
                    }
                    response.addContent(md);
                }
            }
        } else {
            throw new Exception("Failed: Not enough search results (" + tdocs.scoreDocs.length
                    + ") available to meet request for " + nrHits + ".");
        }
    }
    return response;
}
From source file:net.sourceforge.msscodefactory.v1_10.MSSBamPg8.MSSBamPg8FloatDefTable.java
public void createFloatDef(MSSBamAuthorization Authorization, MSSBamFloatDefBuff Buff) { final String S_ProcName = "createFloatDef "; try {//from w w w.j av a 2 s .com Connection cnx = schema.getCnx(); long Id = Buff.getRequiredId(); Float InitValue = Buff.getOptionalInitValue(); Float DefaultValue = Buff.getOptionalDefaultValue(); Float MinValue = Buff.getOptionalMinValue(); Float MaxValue = Buff.getOptionalMaxValue(); Float NullValue = Buff.getOptionalNullValue(); Float UnknownValue = Buff.getOptionalUnknownValue(); String sql = "INSERT INTO mssbam110.float_def( " + "id, " + "initval, " + "defval, " + "minval, " + "maxval, " + "nullvalue, " + "unknownval" + " )" + "VALUES ( " + Id + ", " + ((InitValue == null) ? "null" : InitValue.toString()) + ", " + ((DefaultValue == null) ? "null" : DefaultValue.toString()) + ", " + ((MinValue == null) ? "null" : MinValue.toString()) + ", " + ((MaxValue == null) ? "null" : MaxValue.toString()) + ", " + ((NullValue == null) ? "null" : NullValue.toString()) + ", " + ((UnknownValue == null) ? "null" : UnknownValue.toString()) + " )"; Statement stmt = cnx.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); int rowsAffected = stmt.executeUpdate(sql); if (rowsAffected != 1) { throw CFLib.getDefaultExceptionFactory().newRuntimeException(getClass(), S_ProcName, "Expected 1 row to be affected by insert, not " + rowsAffected); } } catch (SQLException e) { throw CFLib.getDefaultExceptionFactory().newDbException(getClass(), S_ProcName, e); } }
From source file:org.craftercms.cstudio.alfresco.service.impl.PersistenceManagerServiceImpl.java
/** * load content properties from the content metadata *//* ww w .j av a 2 s .com*/ @SuppressWarnings("unchecked") protected void loadContentProperties(String fullPath, NodeRef nodeRef, DmContentItemTO item, Map<QName, Serializable> nodeProperties) { // read common metadata String internalName = DefaultTypeConverter.INSTANCE.convert(String.class, nodeProperties.get(CStudioContentModel.PROP_INTERNAL_NAME)); // set internal name if (!StringUtils.isEmpty(internalName)) { item.setInternalName(internalName); String title = DefaultTypeConverter.INSTANCE.convert(String.class, nodeProperties.get(CStudioContentModel.PROP_TITLE)); if (!StringUtils.isEmpty(title)) { item.setTitle(title); } } else { String title = DefaultTypeConverter.INSTANCE.convert(String.class, nodeProperties.get(CStudioContentModel.PROP_INTERNAL_NAME)); if (!StringUtils.isEmpty(title)) { item.setInternalName(title); item.setTitle(title); } } String metaDescription = DefaultTypeConverter.INSTANCE.convert(String.class, nodeProperties.get(CStudioContentModel.PROP_META_DESCRIPTION)); if (metaDescription != null) { item.setMetaDescription(metaDescription); } // set other status flags Boolean floating = DefaultTypeConverter.INSTANCE.convert(Boolean.class, nodeProperties.get(CStudioContentModel.PROP_FLOATING)); if (floating != null) { item.setFloating(floating); } else { item.setFloating(false); } Boolean disabled = DefaultTypeConverter.INSTANCE.convert(Boolean.class, nodeProperties.get(CStudioContentModel.PROP_DISABLED)); if (disabled != null) { item.setDisabled(disabled); } else { item.setDisabled(false); } // set orders Float orderDefault = DefaultTypeConverter.INSTANCE.convert(Float.class, nodeProperties.get(CStudioContentModel.PROP_ORDER_DEFAULT)); if (orderDefault != null) { List<DmOrderTO> orders = new FastList<DmOrderTO>(); addOrderValue(orders, DmConstants.JSON_KEY_ORDER_DEFAULT, orderDefault.toString()); item.setOrders(orders); } }
From source file:org.apache.solr.update.processor.AddSchemaFieldsUpdateProcessorFactoryTest.java
/**
 * Round-trip test for the "parse-and-add-fields" update chain: indexes one
 * document whose four new fields carry mixed-type multivalues, then asserts
 * (a) the fields were auto-added to the schema with the widest common type
 * (tdouble / tlong / text / tdate) and (b) the stored values come back in
 * their normalized form via an XPath query.
 */
public void testParseAndAddMultipleFieldsRoundTrip() throws Exception {
    IndexSchema schema = h.getCore().getLatestSchema();
    final String fieldName1 = "newfield7";
    final String fieldName2 = "newfield8";
    final String fieldName3 = "newfield9";
    final String fieldName4 = "newfield10";
    // Precondition: none of the fields exist yet.
    assertNull(schema.getFieldOrNull(fieldName1));
    assertNull(schema.getFieldOrNull(fieldName2));
    assertNull(schema.getFieldOrNull(fieldName3));
    assertNull(schema.getFieldOrNull(fieldName4));
    // field1: float + double + long mixed -> expected to widen to double.
    String field1String1 = "-13,258.0";
    Float field1Value1 = -13258.0f;
    String field1String2 = "84,828,800,808.0";
    Double field1Value2 = 8.4828800808E10;
    String field1String3 = "999";
    Long field1Value3 = 999L;
    // field2: int + long -> expected to widen to long.
    String field2String1 = "55,123";
    Integer field2Value1 = 55123;
    String field2String2 = "1,234,567,890,123,456,789";
    Long field2Value2 = 1234567890123456789L;
    // field3: unparsable string + double -> expected to fall back to text.
    String field3String1 = "blah-blah";
    String field3Value1 = field3String1;
    String field3String2 = "-5.28E-3";
    Double field3Value2 = -5.28E-3;
    // field4: date in a lenient input format, checked against ISO-8601 output.
    String field4String1 = "1999-04-17 17:42";
    DateTimeFormatter dateTimeFormatter = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm").withZoneUTC();
    DateTime dateTime = dateTimeFormatter.parseDateTime(field4String1);
    Date field4Value1 = dateTime.toDate();
    DateTimeFormatter dateTimeFormatter2 = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss").withZoneUTC();
    String field4Value1String = dateTimeFormatter2.print(dateTime) + "Z";
    SolrInputDocument d = processAdd("parse-and-add-fields",
            doc(f("id", "6"), f(fieldName1, field1String1, field1String2, field1String3),
                    f(fieldName2, field2String1, field2String2), f(fieldName3, field3String1, field3String2),
                    f(fieldName4, field4String1)));
    assertNotNull(d);
    // Re-fetch the schema: the update processor mutated it.
    schema = h.getCore().getLatestSchema();
    assertNotNull(schema.getFieldOrNull(fieldName1));
    assertNotNull(schema.getFieldOrNull(fieldName2));
    assertNotNull(schema.getFieldOrNull(fieldName3));
    assertNotNull(schema.getFieldOrNull(fieldName4));
    assertEquals("tdouble", schema.getFieldType(fieldName1).getTypeName());
    assertEquals("tlong", schema.getFieldType(fieldName2).getTypeName());
    assertEquals("text", schema.getFieldType(fieldName3).getTypeName());
    assertEquals("tdate", schema.getFieldType(fieldName4).getTypeName());
    assertU(commit());
    // Query back by id and check each stored value's normalized rendering.
    // NOTE(review): field3Value1/field3Value2 and field4Value1 are declared but
    // unused — the XPaths below compare against the raw strings instead.
    assertQ(req("id:6"),
            "//arr[@name='" + fieldName1 + "']/double[.='" + field1Value1.toString() + "']",
            "//arr[@name='" + fieldName1 + "']/double[.='" + field1Value2.toString() + "']",
            "//arr[@name='" + fieldName1 + "']/double[.='" + field1Value3.doubleValue() + "']",
            "//arr[@name='" + fieldName2 + "']/long[.='" + field2Value1.toString() + "']",
            "//arr[@name='" + fieldName2 + "']/long[.='" + field2Value2.toString() + "']",
            "//arr[@name='" + fieldName3 + "']/str[.='" + field3String1 + "']",
            "//arr[@name='" + fieldName3 + "']/str[.='" + field3String2 + "']",
            "//arr[@name='" + fieldName4 + "']/date[.='" + field4Value1String + "']");
}