List of usage examples for java.lang InstantiationException printStackTrace
public void printStackTrace()
From source file:edu.ku.brc.specify.dbsupport.cleanuptools.MultipleRecordComparer.java
/**
 * Builds a human-readable title for one result row.
 *
 * Resolution order:
 *  1. If a dedicated title column applies (single-column or parent mode),
 *     return that column's value directly.
 *  2. Otherwise lazily create a scratch data object, populate it from the
 *     configured display columns, and run it through the table's
 *     data-object formatter.
 *  3. Fall back to {@code fii.toString()}.
 *
 * @param rowData raw column values for the row; indexed via colToIndexMap
 * @return the formatted title for the row
 */
public String getFormattedTitle(final Object[] rowData) {
    // Fast path: a designated title column exists and holds a value.
    if ((isSingleCol || isParent) && indexForTitle > -1 && rowData[indexForTitle] != null) {
        return rowData[indexForTitle].toString();
    }

    if (displayCols.size() > 0) {
        // Lazily instantiate the scratch entity used only for formatting.
        if (displayObj == null) {
            try {
                displayObj = (DataModelObjBase) tblInfo.getClassObj().newInstance();
            } catch (InstantiationException e) {
                e.printStackTrace();
            } catch (IllegalAccessException e) {
                e.printStackTrace();
            }
        }
        if (displayObj != null) {
            // Copy each displayable value onto the scratch entity; columns
            // marked SQL-only are skipped.
            for (DisplayColInfo dci : displayCols) {
                if (!dci.isIncludeSQLOnly()) {
                    int inx = colToIndexMap.get(dci.getFi());
                    FormHelper.setValue(displayObj, dci.getFi().getColumn(), rowData[inx]);
                }
            }
            // Format via the table's registered data-object formatter, if any;
            // otherwise fall back to the entity's identity title.
            UIFieldFormatterIFace formatter = UIFieldFormatterMgr.getInstance()
                    .getFormatter(tblInfo.getDataObjFormatter());
            Object fmtObj = formatter != null ? formatter.formatToUI(displayObj) : null;
            return fmtObj != null ? fmtObj.toString() : displayObj.getIdentityTitle();
        }
    }
    // Last resort — NOTE(review): fii is a field declared outside this view;
    // presumably the field-info descriptor for this comparer. Verify.
    return fii.toString();
}
From source file:keel.Algorithms.Neural_Networks.NNEP_Common.neuralnet.LinkedLayer.java
/** * <p>// w w w . ja v a2s . co m * Returns a copy of this linked layer * </p> * @param previousLayer Previous layer to which copied neurons * are going to be linked * @return LinkedLayer Copy of this linked layer */ public LinkedLayer copy(ILayer<? extends INeuron> previousLayer) { LinkedLayer result = null; try { // Generate new layer result = this.getClass().newInstance(); // Copy properties of the layer result.setMinnofneurons(this.minnofneurons); result.setInitialmaxnofneurons(this.initialmaxnofneurons); result.setMaxnofneurons(this.maxnofneurons); result.setType(this.type); result.setBiased(this.biased); // Copy each neuron for (LinkedNeuron neuron : this.neurons) result.addNeuron(neuron.copy(previousLayer)); } catch (InstantiationException e) { System.out.println("Instantiation Error " + e.getLocalizedMessage()); e.printStackTrace(); } catch (IllegalAccessException e) { System.out.println("Illegal Access Error " + e.getLocalizedMessage()); e.printStackTrace(); } return result; }
From source file:keel.Algorithms.Neural_Networks.NNEP_Common.neuralnet.LinkedNeuron.java
/**
 * Returns a deep copy of this linked neuron: same bias flag and weight
 * range, with freshly constructed links mirroring this neuron's links.
 *
 * @param previousLayer previous layer the copied neuron's links will point
 *                      into as their origins
 * @return the copied neuron, or null if reflective construction failed
 */
public LinkedNeuron copy(ILayer<? extends INeuron> previousLayer) {
    LinkedNeuron result = null;
    try {
        // Generate a new neuron of the same concrete class.
        result = this.getClass().newInstance();
        // Copy biased property.
        result.setBiased(this.biased);
        // Copy weight range.
        result.setWeightRange(this.weightRange);
        // Copy links of the neuron.
        Link resultLinks[] = new Link[this.links.length];
        for (int i = 0; i < this.links.length; i++) {
            // Generate new link.
            resultLinks[i] = new Link();
            // Broken links carry no weight/origin/target.
            resultLinks[i].setBroken(this.links[i].isBroken());
            if (!resultLinks[i].isBroken()) {
                resultLinks[i].setWeight(this.links[i].getWeight());
                // NOTE(review): the copied link's origin is resolved
                // positionally — neuron i of previousLayer — not from the
                // original link's origin object. This assumes the previous
                // layer is index-aligned with the original; verify at call sites.
                if (this.links[i].getOrigin() == null)
                    resultLinks[i].setOrigin(null);
                else
                    resultLinks[i].setOrigin(previousLayer.getNeuron(i));
                resultLinks[i].setTarget(result);
            }
        }
        result.setLinks(resultLinks);
    } catch (InstantiationException e) {
        System.out.println("Instantiation Error " + e.getLocalizedMessage());
        e.printStackTrace();
    } catch (IllegalAccessException e) {
        System.out.println("Illegal Access Error " + e.getLocalizedMessage());
        e.printStackTrace();
    }
    return result;
}
From source file:org.araqne.main.Araqne.java
/** * Register script factory to OSGi service registry. * /*www.j ava 2s . c o m*/ * @param scriptFactory * the script factory * @param alias * the script alias (e.g. logger is alias in "logger.list" * command) */ private void registerScriptFactory(Class<? extends ScriptFactory> scriptFactory, String alias) { try { registerScriptFactory(scriptFactory.newInstance(), alias); } catch (InstantiationException e) { e.printStackTrace(); } catch (IllegalAccessException e) { e.printStackTrace(); } }
From source file:org.sakaiproject.conditions.impl.ResourceReleaseRule.java
private List<Condition> reconstitutePredicates(Element element) { List<Condition> rv = new ArrayList<Condition>(); try {/* w ww .j av a2 s . co m*/ Condition aPredicate = null; NodeList children = element.getChildNodes(); final int length = children.getLength(); for (int i = 0; i < length; i++) { Node child = children.item(i); if (child.getNodeType() != Node.ELEMENT_NODE) continue; Element predicate = (Element) child; // look for properties if (predicate.getTagName().equals("predicate")) { String className = predicate.getAttribute("class"); aPredicate = (Condition) Class.forName(className).newInstance(); ((BooleanExpression) aPredicate).setReceiver(predicate.getAttribute("receiver")); ((BooleanExpression) aPredicate).setMethod(predicate.getAttribute("method")); ((BooleanExpression) aPredicate).setOperator(predicate.getAttribute("operator")); ((BooleanExpression) aPredicate).setArgument(predicate.getAttribute("argument")); rv.add(aPredicate); } } } catch (InstantiationException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (IllegalAccessException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (ClassNotFoundException e) { // TODO Auto-generated catch block e.printStackTrace(); } return rv; }
From source file:edu.umn.cs.spatialHadoop.mapred.SpatialRecordReader.java
/**
 * Reads all shapes left in the current block in one shot. This function
 * runs a loop where it keeps reading shapes by calling the method
 * {@link #nextShape(Shape)} until one of the following conditions happen.
 * 1. The whole file is read. No more records to read.
 * 2. Number of parsed records reaches the threshold defined by the
 *    configuration parameter spatialHadoop.mapred.MaxShapesPerRead.
 *    To disable this check, set the configuration parameter to -1
 * 3. Total size of parsed data from file reaches the threshold defined by
 *    the configuration parameter spatialHadoop.mapred.MaxBytesPerRead.
 *    To disable this check, set the configuration parameter to -1.
 *
 * @param shapes output holder; its value class determines the shape type
 *               instantiated, and the parsed shapes are stored into it
 * @return true if at least one shape was read; false at end of split,
 *         when nothing was read, or if reflective instantiation failed
 * @throws IOException on read errors from the underlying stream
 */
protected boolean nextShapes(ArrayWritable shapes) throws IOException {
    // Prepare a vector that will hold all objects read from this block.
    Vector<Shape> vshapes = new Vector<Shape>();
    try {
        // One reusable stock object; each parsed record is clone()d out of it.
        Shape stockObject = (Shape) shapes.getValueClass().newInstance();
        // Reached the end of this split.
        if (getFilePosition() >= end)
            return false;

        long initialReadPos = getPos();
        long readBytes = 0;

        // Read shapes until a threshold trips or the block is exhausted.
        // A threshold of <= 0 disables that particular check.
        while ((maxShapesInOneRead <= 0 || vshapes.size() < maxShapesInOneRead)
                && (maxBytesInOneRead <= 0 || readBytes < maxBytesInOneRead) && nextShape(stockObject)) {
            vshapes.add(stockObject.clone());
            readBytes = getPos() - initialReadPos;
        }

        // Store them in the return value.
        shapes.set(vshapes.toArray(new Shape[vshapes.size()]));

        return !vshapes.isEmpty();
    } catch (InstantiationException e1) {
        e1.printStackTrace();
    } catch (IllegalAccessException e1) {
        e1.printStackTrace();
    } catch (OutOfMemoryError e) {
        // Too many shapes for memory: log how far we got, then propagate.
        LOG.error("Error reading shapes. Stopped with " + vshapes.size() + " shapes");
        throw e;
    }
    return false;
}
From source file:fr.cobaltians.cobalt.Cobalt.java
/********************************************************************************************** * CONFIGURATION FILE/* www . ja v a 2 s. c o m*/ **********************************************************************************************/ public CobaltFragment getFragmentForController(Class<?> CobaltFragmentClass, String controller, String page) { CobaltFragment fragment = null; try { if (CobaltFragment.class.isAssignableFrom(CobaltFragmentClass)) { fragment = (CobaltFragment) CobaltFragmentClass.newInstance(); Bundle configuration = getConfigurationForController(controller); configuration.putString(kPage, page); fragment.setArguments(configuration); } else if (Cobalt.DEBUG) Log.e(Cobalt.TAG, TAG + " - getFragmentForController: " + CobaltFragmentClass.getSimpleName() + " does not inherit from CobaltFragment!"); } catch (java.lang.InstantiationException exception) { if (Cobalt.DEBUG) Log.e(Cobalt.TAG, TAG + " - getFragmentForController: InstantiationException"); exception.printStackTrace(); } catch (IllegalAccessException exception) { if (Cobalt.DEBUG) Log.e(Cobalt.TAG, TAG + " - getFragmentForController: IllegalAccessException"); exception.printStackTrace(); } return fragment; }
From source file:org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.LoadBalanceEndPointTransformer.java
/**
 * Transforms a visual LoadBalanceEndPoint model node into a runtime Synapse
 * LoadbalanceEndpoint (or SALoadbalanceEndpoint when session affinity is
 * configured), resolving the load-balance algorithm class reflectively,
 * attaching the session dispatcher, and collecting child endpoints either
 * from the diagram's output connectors or by re-parsing the endpoint's
 * serialized XML from the workspace project.
 *
 * @param info           transformation state; records the first endpoint found
 * @param visualEndPoint the visual model node being transformed
 * @param name           optional name to assign to the created endpoint
 * @param endPoints      caller's endpoint list (not populated here; children
 *                       are gathered into a local list)
 * @return the configured Synapse load-balance endpoint
 * @throws TransformerException if the visual endpoint has no name configured
 */
public LoadbalanceEndpoint create(TransformationInfo info, LoadBalanceEndPoint visualEndPoint, String name,
        List<Endpoint> endPoints) throws TransformerException {
    if (StringUtils.isEmpty(visualEndPoint.getName())) {
        throw new TransformerException(
                "Load-BalanceEndPoint should be configured. Double click on endpoint to configure.");
    }
    IEditorPart editorPart = null;
    IProject activeProject = null;
    List<Endpoint> endPointsList = new ArrayList<Endpoint>();
    EndpointDefinition synapseEPDef = new EndpointDefinition();
    LoadbalanceEndpoint synapseLBEP;

    // Session-affine load balancing needs the SA variant; plain NONE does not.
    if (visualEndPoint.getSessionType().equals(LoadBalanceSessionType.NONE)) {
        synapseLBEP = new LoadbalanceEndpoint();
    } else {
        synapseLBEP = new SALoadbalanceEndpoint();
        Long sessionTimeout = visualEndPoint.getSessionTimeout();
        if (sessionTimeout != null) {
            ((SALoadbalanceEndpoint) synapseLBEP).setSessionTimeout(sessionTimeout);
        }
    }

    if (StringUtils.isNotBlank(name)) {
        synapseLBEP.setName(name);
    }

    /*
     * We should give this LoadbalanceAlgorithm class at runtime. User should
     * be requested to give a class.
     */
    try {
        // Resolve and instantiate the user-specified algorithm class; each
        // failure mode gets its own error dialog, then processing continues
        // with no algorithm set.
        Class<?> algorithmClass = Class.forName(visualEndPoint.getAlgorithm().trim());
        Object algorithm = algorithmClass.newInstance();
        if (algorithm instanceof LoadbalanceAlgorithm) {
            synapseLBEP.setAlgorithm((LoadbalanceAlgorithm) algorithm);
        }
    } catch (ClassNotFoundException e1) {
        MessageDialog.openError(Display.getCurrent().getActiveShell(), "Error in Loadbalance Endpoint ! ",
                visualEndPoint.getAlgorithm().trim() + " algorithm class not found.");
        e1.printStackTrace();
    } catch (InstantiationException e) {
        MessageDialog.openError(Display.getCurrent().getActiveShell(), "Error in Loadbalance Endpoint ! ",
                visualEndPoint.getAlgorithm().trim() + " algorithm class cannot be instantiated.");
        e.printStackTrace();
    } catch (IllegalAccessException e) {
        MessageDialog.openError(Display.getCurrent().getActiveShell(), "Error in Loadbalance Endpoint ! ",
                "Illegal access to " + visualEndPoint.getAlgorithm().trim() + " algorithm class.");
        e.printStackTrace();
    }

    // Attach the session dispatcher matching the configured session type.
    // Non-NONE cases rely on synapseLBEP being an SALoadbalanceEndpoint
    // (guaranteed by the construction above).
    switch (visualEndPoint.getSessionType()) {
    case SOAP:
        Dispatcher soapDispatcher = new SoapSessionDispatcher();
        ((SALoadbalanceEndpoint) synapseLBEP).setDispatcher(soapDispatcher);
        break;
    case TRANSPORT:
        Dispatcher httpDispatcher = new HttpSessionDispatcher();
        ((SALoadbalanceEndpoint) synapseLBEP).setDispatcher(httpDispatcher);
        break;
    case CLIENT_ID:
        Dispatcher csDispatcher = new SimpleClientSessionDispatcher();
        ((SALoadbalanceEndpoint) synapseLBEP).setDispatcher(csDispatcher);
        break;
    case NONE:
        break;
    }

    synapseLBEP.setDefinition(synapseEPDef);
    saveProperties(visualEndPoint, synapseLBEP);

    if (!info.isEndPointFound) {
        info.isEndPointFound = true;
        info.firstEndPoint = visualEndPoint;
    }

    try {
        if (visualEndPoint.eContainer() instanceof EndpointDiagram) {
            // In-diagram case: walk each output connector's outgoing link and
            // transform the connected node into a child endpoint.
            ArrayList<LoadBalanceEndPointOutputConnector> connectors = new ArrayList<LoadBalanceEndPointOutputConnector>();
            connectors.addAll(visualEndPoint.getOutputConnector());
            for (LoadBalanceEndPointOutputConnector outputConnector : connectors) {
                if (outputConnector.getOutgoingLink() != null) {
                    if (outputConnector.getOutgoingLink().getTarget() != null) {
                        EsbNode esbNode = (EsbNode) outputConnector.getOutgoingLink().getTarget().eContainer();
                        EsbNodeTransformer transformer = EsbTransformerRegistry.getInstance()
                                .getTransformer(esbNode);
                        transformer.createSynapseObject(info, esbNode, endPointsList);
                    }
                }
            }
        } else {
            // Out-of-diagram case: locate the active editor's project, load the
            // serialized endpoint XML from the complex-endpoints folder, and
            // rebuild the endpoint from that XML instead.
            IEditorReference editorReferences[] = PlatformUI.getWorkbench().getActiveWorkbenchWindow()
                    .getActivePage().getEditorReferences();
            for (int i = 0; i < editorReferences.length; i++) {
                IEditorPart editor = editorReferences[i].getEditor(false);
                if (editor != null) {
                    editorPart = editor.getSite().getWorkbenchWindow().getActivePage().getActiveEditor();
                }
                if (editorPart != null) {
                    IEsbEditorInput input = (IEsbEditorInput) editorPart.getEditorInput();
                    IFile file = input.getXmlResource();
                    activeProject = file.getProject();
                }
            }
            String endpointName = (String) visualEndPoint.getName();
            IPath location = new Path(
                    "src/main/synapse-config/complex-endpoints" + "/" + endpointName + ".xml");
            IFile file = activeProject.getFile(location);
            final String source = FileUtils.getContentAsString(file.getContents());
            OMElement element = AXIOMUtil.stringToOM(source);
            Properties properties = new Properties();
            properties.put(WSDLEndpointFactory.SKIP_WSDL_PARSING, "true");
            synapseLBEP = (LoadbalanceEndpoint) EndpointFactory.getEndpointFromElement(element, false,
                    properties);
        }
    } catch (Exception e) {
        // NOTE(review): broad catch silently swallows any failure in the
        // child-endpoint resolution above; the endpoint is returned as-is.
        e.printStackTrace();
    }

    if (synapseLBEP instanceof SALoadbalanceEndpoint) {
        synapseLBEP.setChildren(endPointsList);
    } else {
        if (endPointsList.size() > 0) {
            synapseLBEP.setChildren(endPointsList);
        } else {
            // Load Balance endpoint members: no child endpoints configured, so
            // translate the visual member list into axis2 clustering Members.
            List<org.apache.axis2.clustering.Member> members = new ArrayList<org.apache.axis2.clustering.Member>();
            if (visualEndPoint.getMember() != null && visualEndPoint.getMember().size() > 0) {
                EList<Member> visualMembers = visualEndPoint.getMember();
                for (Member visualMember : visualMembers) {
                    org.apache.axis2.clustering.Member member = new org.apache.axis2.clustering.Member(
                            visualMember.getHostName(), -1);
                    member.setHttpPort(Integer.parseInt(visualMember.getHttpPort()));
                    member.setHttpsPort(Integer.parseInt(visualMember.getHttpsPort()));
                    members.add(member);
                }
            }
            synapseLBEP.setMembers(members);
        }
    }
    return synapseLBEP;
}
From source file:com.ushahidi.android.app.fragments.BaseSectionListFragment.java
@SuppressWarnings("unchecked") protected <T> T createInstance(Class<?> type, Class<?> constructor, Object... params) { try {//from ww w .j a v a 2 s . c o m return (T) type.getConstructor(constructor).newInstance(params); } catch (InstantiationException e) { log("InstantiationException", e); } catch (IllegalAccessException e) { log("IllegalAccessException", e); } catch (InvocationTargetException e) { log("InvocationTargetException", e); } catch (NoSuchMethodException e) { log("NoSuchMethodException", e); } catch (IllegalArgumentException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (java.lang.InstantiationException e) { // TODO Auto-generated catch block e.printStackTrace(); } return null; }
From source file:org.apache.hadoop.hive.ql.exec.AnalysisOperator.java
/**
 * Initializes this analysis (windowing) operator: builds evaluators and
 * object inspectors for partition-by keys, order-by keys, each analysis
 * (window) function's parameters, and pass-through columns; derives window
 * lag/lead sizes and the forward mode from the UDWF names; assembles the
 * output row inspector; and sets up the LazyBinary serde plus the spill
 * buffer used for functions that aggregate over an ordered window.
 *
 * @param hconf Hadoop configuration, also passed to the serde and buffer
 * @throws HiveException propagated from evaluator/inspector initialization
 */
protected void initializeOp(Configuration hconf) throws HiveException {
    this.hconf = hconf;
    rowInspector = inputObjInspectors[0];
    // All evaluators below are initialized against the standard (writable
    // copy) form of the input row inspector.
    standardRowInspector = ObjectInspectorUtils.getStandardObjectInspector(rowInspector);
    isDistinct = conf.getDistinct();

    // --- Partition-by key evaluators and inspectors ---
    pkeyFields = new ExprNodeEvaluator[conf.getPartitionByKeys().size()];
    pkeyObjectInspectors = new ObjectInspector[pkeyFields.length];
    pkeyObjects = new Object[pkeyFields.length];
    if (pkeyFields.length > 0) {
        for (int i = 0; i < pkeyFields.length; i++) {
            pkeyFields[i] = ExprNodeEvaluatorFactory.get(conf.getPartitionByKeys().get(i));
            pkeyObjectInspectors[i] = pkeyFields[i].initialize(standardRowInspector);
            pkeyObjects[i] = null;
        }
    }

    // --- Order-by key evaluators and inspectors ---
    okeyFields = new ExprNodeEvaluator[conf.getOrderByKeys().size()];
    okeyObjectInspectors = new ObjectInspector[okeyFields.length];
    okeyObjects = new Object[okeyFields.length];
    for (int i = 0; i < okeyFields.length; i++) {
        okeyFields[i] = ExprNodeEvaluatorFactory.get(conf.getOrderByKeys().get(i));
        okeyObjectInspectors[i] = okeyFields[i].initialize(standardRowInspector);
        okeyObjects[i] = null;
    }

    // --- Per-analysis-function parameter evaluators ---
    hasAggregateOrderBy = new boolean[conf.getAnalysises().size()];
    hasAggregateOrderByRevIdx = new int[conf.getAnalysises().size()];
    analysisParameterFields = new ExprNodeEvaluator[conf.getAnalysises().size()][];
    analysisParameterObjectInspectors = new ObjectInspector[conf.getAnalysises().size()][];
    analysisParameterObjects = new Object[conf.getAnalysises().size()][];
    for (int i = 0; i < analysisParameterFields.length; i++) {
        analysisEvaluatorDesc aed = conf.getAnalysises().get(i);
        String udwfname = aed.getGenericUDWFName().toLowerCase();
        ArrayList<exprNodeDesc> parameters = aed.getParameters();
        hasAggregateOrderBy[i] = aed.hasAggregateOrderBy();
        hasAggregateOrderByRevIdx[i] = -1;
        if (hasAggregateOrderBy[i]) {
            // Assign this function the next slot in the ordered-aggregate set.
            hasAggregateOrderByRevIdx[i] = hasAggregateOrderByNumber;
            hasAggregateOrderByNumber++;
        }
        // Window sizing / forward-mode selection keyed off the UDWF name.
        // NOTE(review): matching is substring-based ("contains"), so e.g.
        // any name containing "rank" hits the rank branches — verify that
        // UDWF naming guarantees no accidental collisions.
        if (udwfname.contains("lag")) {
            int lag = 1;
            if (parameters.size() > 1) {
                // Second parameter is the constant lag offset.
                lag = (Integer) ((exprNodeConstantDesc) parameters.get(1)).getValue();
            }
            if (lag > windowlag)
                windowlag = lag;
        } else if (udwfname.contains("lead")) {
            int lead = 1;
            if (parameters.size() > 1) {
                lead = (Integer) ((exprNodeConstantDesc) parameters.get(1)).getValue();
            }
            if (lead > windowlead)
                windowlead = lead;
        } else if (udwfname.contains("row_number") || udwfname.contains("rank")
                || udwfname.contains("first_value")) {
            // Streaming-friendly functions: no whole-partition buffering needed.
        } else if (hasAggregateOrderBy[i]) {
            // Ordered aggregates are handled via the ANA-store buffer below.
        } else {
            // Anything else forces buffering of the whole partition.
            this.forwardMode = ForwardMode.WHOLEPARTITION;
        }
        if (udwfname.contains("rank")) {
            // Rank functions take the order-by keys as their parameters.
            parameters = new ArrayList<exprNodeDesc>();
            parameters.addAll(conf.getOrderByKeys());
        }
        analysisParameterFields[i] = new ExprNodeEvaluator[parameters.size()];
        analysisParameterObjectInspectors[i] = new ObjectInspector[parameters.size()];
        analysisParameterObjects[i] = new Object[parameters.size()];
        for (int j = 0; j < parameters.size(); j++) {
            analysisParameterFields[i][j] = ExprNodeEvaluatorFactory.get(parameters.get(j));
            analysisParameterObjectInspectors[i][j] = analysisParameterFields[i][j]
                    .initialize(standardRowInspector);
            analysisParameterObjects[i][j] = null;
        }
    }

    // Forward index: positions of the functions that aggregate over an
    // ordered window (inverse of hasAggregateOrderByRevIdx).
    hasAggregateOrderByIdx = new int[this.hasAggregateOrderByNumber];
    int numm = 0;
    for (int j = 0; j < this.hasAggregateOrderBy.length; j++) {
        if (this.hasAggregateOrderBy[j]) {
            this.hasAggregateOrderByIdx[numm++] = j;
        }
    }

    // --- Pass-through (non-key, non-analysis) column evaluators ---
    otherColumns = new ExprNodeEvaluator[conf.getOtherColumns().size()];
    otherColumnsObjectInspectors = new ObjectInspector[otherColumns.length];
    otherColumnsObjects = new Object[otherColumns.length];
    for (int i = 0; i < otherColumns.length; i++) {
        otherColumns[i] = ExprNodeEvaluatorFactory.get(conf.getOtherColumns().get(i));
        otherColumnsObjectInspectors[i] = otherColumns[i].initialize(standardRowInspector);
        otherColumnsObjects[i] = null;
    }

    analysisIsDistinct = new boolean[conf.getAnalysises().size()];
    for (int i = 0; i < analysisIsDistinct.length; i++) {
        analysisIsDistinct[i] = conf.getAnalysises().get(i).getDistinct();
    }

    analysisEvaluators = new GenericUDWFEvaluator[conf.getAnalysises().size()];
    for (int i = 0; i < analysisEvaluators.length; i++) {
        analysisEvaluatorDesc agg = conf.getAnalysises().get(i);
        analysisEvaluators[i] = agg.getGenericUDWFEvaluator();
    }

    // --- Output inspector: pkeys, okeys, analysis results, other columns ---
    int totalFields = pkeyFields.length + okeyFields.length + analysisEvaluators.length + otherColumns.length;
    objectInspectors = new ArrayList<ObjectInspector>(totalFields);
    for (int i = 0; i < pkeyFields.length; i++) {
        objectInspectors.add(pkeyObjectInspectors[i]);
    }
    for (int i = 0; i < okeyFields.length; i++) {
        objectInspectors.add(okeyObjectInspectors[i]);
    }
    ArrayList<ObjectInspector> aggregateOrderByObjectInspectors = new ArrayList<ObjectInspector>();
    ArrayList<String> anaStoredName = new ArrayList<String>();
    for (int i = 0; i < analysisEvaluators.length; i++) {
        ObjectInspector roi = analysisEvaluators[i].init(analysisParameterObjectInspectors[i]);
        objectInspectors.add(roi);
        if (hasAggregateOrderBy[i]) {
            // Ordered aggregates additionally contribute a field to the
            // intermediate ANA-store struct.
            anaStoredName.add("aggr" + i);
            aggregateOrderByObjectInspectors.add(roi);
        }
    }
    for (int i = 0; i < otherColumns.length; i++) {
        objectInspectors.add(otherColumnsObjectInspectors[i]);
    }
    outputObjInspector = ObjectInspectorFactory.getStandardStructObjectInspector(conf.getOutputColumnNames(),
            objectInspectors);

    // ANA-store struct: field 0 is the whole row, followed by each ordered
    // aggregate's intermediate value.
    anaStoredName.add(0, "rowobj");
    aggregateOrderByObjectInspectors.add(0, standardRowInspector);
    aggregateOrderByObjectInspectorANAStore = ObjectInspectorFactory
            .getStandardStructObjectInspector(anaStoredName, aggregateOrderByObjectInspectors);

    // Key inspectors for detecting partition boundaries (current vs new key).
    ArrayList<String> keyNames = new ArrayList<String>(pkeyFields.length);
    for (int i = 0; i < pkeyFields.length; i++) {
        keyNames.add(conf.getOutputColumnNames().get(i));
    }
    pcurrentKeyObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(keyNames,
            Arrays.asList(pkeyObjectInspectors));
    pnewKeyObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(keyNames,
            Arrays.asList(pkeyObjectInspectors));

    analysisParametersLastInvoke = new Object[conf.getAnalysises().size()][];
    aggregations = newAggregations();
    pnewKeys = new ArrayList<Object>();

    // Build the column-name/column-type properties ("_VALUE_k" names) that
    // describe the ANA-store struct for the LazyBinary serde.
    StringBuffer colNames = new StringBuffer();
    StringBuffer colTypes = new StringBuffer();
    StructObjectInspector soi = (StructObjectInspector) aggregateOrderByObjectInspectorANAStore;
    List<? extends StructField> fields = soi.getAllStructFieldRefs();
    for (int k = 0; k < fields.size(); k++) {
        String newColName = "_VALUE_" + k;
        colNames.append(newColName);
        colNames.append(',');
        colTypes.append(fields.get(k).getFieldObjectInspector().getTypeName());
        colTypes.append(',');
    }
    // Drop the trailing commas.
    colNames.setLength(colNames.length() - 1);
    colTypes.setLength(colTypes.length() - 1);
    Properties properties = Utilities.makeProperties(
            org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "" + Utilities.ctrlaCode,
            org.apache.hadoop.hive.serde.Constants.LIST_COLUMNS, colNames.toString(),
            org.apache.hadoop.hive.serde.Constants.LIST_COLUMN_TYPES, colTypes.toString());
    try {
        anaserde = LazyBinarySerDe.class.newInstance();
        anaserde.initialize(hconf, properties);
    } catch (InstantiationException e) {
        e.printStackTrace();
    } catch (IllegalAccessException e) {
        e.printStackTrace();
    } catch (SerDeException e) {
        e.printStackTrace();
    }
    anabuffer = new AnalysisBuffer<Object>(anaserde, this.aggregateOrderByObjectInspectorANAStore, hconf);

    // Debug dump of the ordered-aggregate bookkeeping arrays.
    System.out.println("hasAggregateOrderByNumber\t" + hasAggregateOrderByNumber);
    for (int i = 0; i < hasAggregateOrderBy.length; i++) {
        System.out.print(hasAggregateOrderBy[i] + "\t");
    }
    System.out.println();
    for (int i = 0; i < hasAggregateOrderByIdx.length; i++) {
        System.out.print(hasAggregateOrderByIdx[i] + "\t");
    }
    System.out.println();
    for (int i = 0; i < hasAggregateOrderByRevIdx.length; i++) {
        System.out.print(hasAggregateOrderByRevIdx[i] + "\t");
    }
    System.out.println();

    initializeChildren(hconf);
}