List of usage examples for java.util.HashSet.size()
public int size()
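size() returns the number of elements currently in the set (its cardinality); because a HashSet stores no duplicates, the count reflects distinct elements only. Before the harvested examples below, a minimal standalone sketch (the values are illustrative):

import java.util.HashSet;

public class HashSetSizeDemo {
    public static void main(String[] args) {
        HashSet<String> names = new HashSet<>();
        names.add("alice");
        names.add("bob");
        names.add("alice");                 // duplicate, ignored by the set
        System.out.println(names.size());  // prints 2
        names.clear();
        System.out.println(names.size());  // prints 0
    }
}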
From source file:org.gcaldaemon.core.notifier.GmailNotifier.java
private static final String[] getActiveUsers() {
    HashSet users = new HashSet();
    try {
        String me = System.getProperty("user.name");
        if (me != null) {
            users.add(me);
        }
    } catch (Exception ignored) {
    }
    try {
        String os = System.getProperty("os.name", "unknown");
        if (commandExecutable && os.toLowerCase().indexOf("windows") != -1) {

            // Execute script
            ProcessBuilder builder = new ProcessBuilder(TASK_COMMAND);
            Process tasklist = builder.start();

            // Read command output
            InputStream in = tasklist.getInputStream();
            QuickWriter buffer = new QuickWriter();
            BufferedInputStream bis = new BufferedInputStream(in);
            InputStreamReader isr = new InputStreamReader(bis);
            char[] chars = new char[1024];
            int len;
            while ((len = isr.read(chars)) != -1) {
                buffer.write(chars, 0, len);
            }

            // Parse output
            String token, out = buffer.toString();
            StringTokenizer lines = new StringTokenizer(out, "\r\n");
            StringTokenizer tokens;
            int i;
            while (lines.hasMoreTokens()) {
                tokens = new StringTokenizer(lines.nextToken(), "\"", false);
                while (tokens.hasMoreTokens()) {
                    token = tokens.nextToken();
                    i = token.indexOf('\\');
                    if (i != -1) {
                        token = token.substring(i + 1);
                        if (token.length() != 0) {
                            users.add(token);
                            break;
                        }
                    }
                }
            }
        }
    } catch (Exception invalidSyntax) {
        commandExecutable = false;
        log.debug(invalidSyntax);
    }
    String[] array = new String[users.size()];
    if (array.length > 0) {
        users.toArray(array);
        if (log.isDebugEnabled()) {
            QuickWriter writer = new QuickWriter(100);
            for (int i = 0; i < array.length; i++) {
                writer.write(array[i]);
                if (i < array.length - 1) {
                    writer.write(", ");
                }
            }
            log.debug("Active users: " + writer.toString());
        }
    }
    return array;
}
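The example above sizes its result array from the set before copying into it. A minimal sketch of that idiom with made-up data, independent of the GmailNotifier class:

import java.util.HashSet;

public class SetToArrayDemo {
    public static void main(String[] args) {
        HashSet<String> users = new HashSet<>();
        users.add("alice");
        users.add("bob");

        // Allocate the destination array from the set's current size,
        // then copy the elements into it.
        String[] array = new String[users.size()];
        if (array.length > 0) {
            users.toArray(array);
        }
        System.out.println(String.join(", ", array));
    }
}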
From source file:org.apache.hadoop.yarn.server.resourcemanager.monitor.capacity.ProportionalCapacityPreemptionPolicyMockFramework.java
/**
 * Format is:
 * <pre>
 * queueName\t  // app1
 * (priority,resource,host,expression,#repeat,reserved)
 * (priority,resource,host,expression,#repeat,reserved);
 * queueName\t  // app2
 * </pre>
 */
private void mockApplications(String appsConfig) {
    int id = 1;
    HashMap<String, HashSet<String>> userMap = new HashMap<String, HashSet<String>>();
    LeafQueue queue = null;
    for (String a : appsConfig.split(";")) {
        String[] strs = a.split("\t");
        String queueName = strs[0];

        // get containers
        List<RMContainer> liveContainers = new ArrayList<RMContainer>();
        List<RMContainer> reservedContainers = new ArrayList<RMContainer>();
        ApplicationId appId = ApplicationId.newInstance(0L, id);
        ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(appId, 1);

        FiCaSchedulerApp app = mock(FiCaSchedulerApp.class);
        when(app.getAMResource(anyString())).thenReturn(Resources.createResource(0, 0));
        mockContainers(strs[1], app, appAttemptId, queueName, reservedContainers, liveContainers);
        LOG.debug("Application mock: queue: " + queueName + ", appId:" + appId);

        when(app.getLiveContainers()).thenReturn(liveContainers);
        when(app.getReservedContainers()).thenReturn(reservedContainers);
        when(app.getApplicationAttemptId()).thenReturn(appAttemptId);
        when(app.getApplicationId()).thenReturn(appId);

        // add to LeafQueue
        queue = (LeafQueue) nameToCSQueues.get(queueName);
        queue.getApplications().add(app);
        queue.getAllApplications().add(app);

        HashSet<String> users = userMap.get(queueName);
        if (null == users) {
            users = new HashSet<String>();
            userMap.put(queueName, users);
        }
        users.add(app.getUser());

        id++;
    }

    for (String queueName : userMap.keySet()) {
        queue = (LeafQueue) nameToCSQueues.get(queueName);
        // Currently we have user-limit test support only for default label.
        Resource totResoucePerPartition = partitionToResource.get("");
        Resource capacity = Resources.multiply(totResoucePerPartition,
                queue.getQueueCapacities().getAbsoluteCapacity());
        HashSet<String> users = userMap.get(queue.getQueueName());
        Resource userLimit = Resources.divideAndCeil(rc, capacity, users.size());
        for (String user : users) {
            when(queue.getUserLimitPerUser(eq(user), any(Resource.class), anyString()))
                    .thenReturn(userLimit);
        }
    }
}
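The mock above derives a per-user limit by dividing a queue's capacity by users.size(), the number of distinct users collected for that queue. A simplified sketch of the same calculation using plain integers instead of YARN Resource objects (the capacity value is invented):

import java.util.HashSet;
import java.util.Set;

public class UserLimitDemo {
    public static void main(String[] args) {
        Set<String> users = new HashSet<>();
        users.add("u1");
        users.add("u2");
        users.add("u1"); // duplicates collapse, so size() counts distinct users

        int queueCapacity = 100;
        // divide-and-ceil, analogous to what Resources.divideAndCeil does for Resource objects
        int userLimit = (queueCapacity + users.size() - 1) / users.size();
        System.out.println("per-user limit: " + userLimit); // 50
    }
}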
From source file:net.yacy.peers.Protocol.java
/**
 * This is called to enrich the seed information by
 * - own address (if peer is behind a nat/router)
 * - check peer type (virgin/junior/senior/principal)
 * To do this, we send a 'Hello' to another peer. This carries the following information:
 * 'iam' - own hash
 * 'youare' - remote hash, to verify that we are correct
 * 'key' - a session key that the remote peer may use to answer
 * and the own seed string.
 * We expect the following information to be sent back:
 * - 'yourip' the ip of the connection peer (we)
 * - 'yourtype' the type of this peer that the other peer checked by asking for a specific word
 * and the remote seed string.
 * One exceptional failure case is when we know the other peer's hash, the other peer responds
 * correctly but they appear to be another peer by comparison of the other peer's hash.
 * This works of course only if we know the other peer's hash.
 *
 * @return the number of new seeds
 */
public static Map<String, String> hello(final Seed mySeed, final PeerActions peerActions,
        final MultiProtocolURL targetBaseURL, final String targetHash) {

    Map<String, String> result = null;
    final String salt = crypt.randomSalt();
    long responseTime = Long.MAX_VALUE;
    byte[] content = null;
    try {
        // generate request
        final Map<String, ContentBody> parts = basicRequestParts(Switchboard.getSwitchboard(), null, salt);
        parts.put("count", UTF8.StringBody("20"));
        parts.put("magic", UTF8.StringBody(Long.toString(Network.magic)));
        parts.put("seed", UTF8.StringBody(mySeed.genSeedStr(salt)));
        // send request
        final long start = System.currentTimeMillis();
        // final byte[] content = HTTPConnector.getConnector(MultiProtocolURI.yacybotUserAgent).post(new MultiProtocolURI("http://" + address + "/yacy/hello.html"), 30000, yacySeed.b64Hash2hexHash(otherHash) + ".yacyh", parts);
        final HTTPClient httpClient = new HTTPClient(ClientIdentification.yacyInternetCrawlerAgent, 30000);
        content = httpClient.POSTbytes(new MultiProtocolURL(targetBaseURL, "/yacy/hello.html"),
                Seed.b64Hash2hexHash(targetHash) + ".yacyh", parts, false, true);
        responseTime = System.currentTimeMillis() - start;
        result = FileUtils.table(content);
    } catch (final Exception e) {
        if (Thread.currentThread().isInterrupted()) {
            Network.log.info("yacyClient.hello thread '" + Thread.currentThread().getName() + "' interrupted.");
            return null;
        }
        Network.log.info("yacyClient.hello thread '" + Thread.currentThread().getName() + "', peer "
                + targetBaseURL + "; exception: " + e.getMessage());
        // try again (go into loop)
        result = null;
    }

    if (result == null || result.size() == 0) {
        Network.log.info("yacyClient.hello result error: "
                + ((result == null) ? "result null" : ("result=" + result.toString())));
        return null;
    }
    Network.log.info("yacyClient.hello thread '" + Thread.currentThread().getName() + "' contacted peer at "
            + targetBaseURL + ", received " + ((content == null) ? "null" : content.length)
            + " bytes, time = " + responseTime + " milliseconds");

    // check consistency with expectation
    Seed otherPeer = null;
    String seed;
    if ((targetHash != null) && (targetHash.length() > 0) && ((seed = result.get("seed0")) != null)) {
        if (seed.length() > Seed.maxsize) {
            Network.log.info("hello/client 0: rejected contacting seed; too large (" + seed.length() + " > "
                    + Seed.maxsize + ")");
        } else {
            try {
                // patch the remote peer address to avoid that remote peers spoof the network with wrong addresses
                String host = Domains.stripToHostName(targetBaseURL.getHost());
                InetAddress ie = Domains.dnsResolve(host);
                otherPeer = Seed.genRemoteSeed(seed, false, ie.getHostAddress());
                if (!otherPeer.hash.equals(targetHash)) {
                    Network.log.info("yacyClient.hello: consistency error: otherPeer.hash = " + otherPeer.hash
                            + ", otherHash = " + targetHash);
                    return null; // no success
                }
            } catch (final IOException e) {
                Network.log.info("yacyClient.hello: consistency error: other seed bad:" + e.getMessage()
                        + ", seed=" + seed);
                return null; // no success
            }
        }
    }

    // get access type response
    String mytype = result.get(Seed.YOURTYPE);
    if (mytype == null) {
        mytype = "";
    }

    // set my own seed according to new information
    // we overwrite our own IP number only
    if (serverCore.useStaticIP) {
        mySeed.setIPs(Switchboard.getSwitchboard().myPublicIPs());
    } else {
        final String myIP = result.get("yourip");
        if (myIP == null) {
            Network.log.info(
                    "yacyClient.hello result error: Peer sent incompleet hello message (key yourip is missing)");
            return null; // no success
        }
        // with the IPv6 extension, this may contain several ips, separated by comma ','
        HashSet<String> h = new HashSet<>();
        for (String s : CommonPattern.COMMA.split(myIP)) {
            if (s.length() > 0 && Seed.isProperIP(s)) h.add(s);
        }
        if (h.size() > 0) mySeed.setIPs(h);
    }

    mySeed.setFlagRootNode((mytype.equals(Seed.PEERTYPE_SENIOR) || mytype.equals(Seed.PEERTYPE_PRINCIPAL))
            && Switchboard.getSwitchboard().index.fulltext().connectedLocalSolr() && responseTime < 1000
            && Domains.isThisHostIP(mySeed.getIPs()));

    // change our seed-type
    final Accessible accessible = new Accessible();
    if (mytype.equals(Seed.PEERTYPE_SENIOR) || mytype.equals(Seed.PEERTYPE_PRINCIPAL)) {
        accessible.IWasAccessed = true;
        if (mySeed.isPrincipal()) {
            mytype = Seed.PEERTYPE_PRINCIPAL;
        }
    } else {
        accessible.IWasAccessed = false;
    }
    accessible.lastUpdated = System.currentTimeMillis();
    Network.amIAccessibleDB.put(targetHash, accessible);

    /*
     * If we were reported as junior we have to check if your port forwarding channel is broken
     * If this is true we try to reconnect the sch channel to the remote server now.
     */
    if (mytype.equalsIgnoreCase(Seed.PEERTYPE_JUNIOR)) {
        Network.log.info("yacyClient.hello: Peer '" + ((otherPeer == null) ? "unknown" : otherPeer.getName())
                + "' reported us as junior.");
    } else if ((mytype.equalsIgnoreCase(Seed.PEERTYPE_SENIOR))
            || (mytype.equalsIgnoreCase(Seed.PEERTYPE_PRINCIPAL))) {
        if (Network.log.isFine()) {
            Network.log.fine("yacyClient.hello: Peer '" + ((otherPeer == null) ? "unknown" : otherPeer.getName())
                    + "' reported us as " + mytype + ", accepted other peer.");
        }
    } else {
        // wrong type report
        if (Network.log.isFine()) {
            Network.log.fine("yacyClient.hello: Peer '" + ((otherPeer == null) ? "unknown" : otherPeer.getName())
                    + "' reported us as " + mytype + ", rejecting other peer.");
        }
        return null;
    }
    if (mySeed.orVirgin().equals(Seed.PEERTYPE_VIRGIN)) {
        mySeed.put(Seed.PEERTYPE, mytype);
    }

    final String error = mySeed.isProper(true);
    if (error != null) {
        Network.log.warn("yacyClient.hello mySeed error - not proper: " + error);
        return null;
    }

    //final Date remoteTime = yacyCore.parseUniversalDate((String) result.get(yacySeed.MYTIME)); // read remote time

    // read the seeds that the peer returned and integrate them into own database
    int i = 0;
    String seedStr;
    Seed s;
    final int connectedBefore = peerActions.sizeConnected();
    while ((seedStr = result.get("seed" + i++)) != null) {
        // integrate new seed into own database
        // the first seed, "seed0" is the seed of the responding peer
        if (seedStr.length() > Seed.maxsize) {
            Network.log.info("hello/client: rejected contacting seed; too large (" + seedStr.length() + " > "
                    + Seed.maxsize + ")");
        } else {
            try {
                if (i == 1) {
                    String host = Domains.stripToHostName(targetBaseURL.getHost());
                    InetAddress ia = Domains.dnsResolve(host);
                    if (ia == null) continue;
                    // the actual address of the target as we had been successful when contacting them is patched here
                    host = ia.getHostAddress();
                    s = Seed.genRemoteSeed(seedStr, false, host);
                } else {
                    s = Seed.genRemoteSeed(seedStr, false, null);
                }
                peerActions.peerArrival(s, (i == 1));
            } catch (final IOException e) {
                Network.log.info("hello/client: rejected contacting seed; bad (" + e.getMessage() + ")");
            }
        }
    }
    final int connectedAfter = peerActions.sizeConnected();

    // update event tracker
    EventTracker.update(EventTracker.EClass.PEERPING, new ProfilingGraph.EventPing(mySeed.getName(), targetHash,
            true, connectedAfter - connectedBefore), false);

    return result;
}
From source file:amie.keys.CSAKey.java
public void discoverConditionalKeysForCondition(Graph newGraph, Graph graph, HashSet<Node> candidateKeys,
        Rule conditionRule, Set<Rule> output) {
    HashSet<Node> newCandidateKeys = new HashSet<>();
    for (Node candidateKey : candidateKeys) {
        // System.out.println("candidateKey:" + candidateKey);
        if (candidateKey.toExplore) {
            // System.out.println("candidate:" + candidateKey);
            // if (candidateKey.toExplore) {
            List<String> properties = candidateKey.mapToString(id2Property);
            Rule amieRule = buildAMIERule(properties, conditionRule);
            // System.out.println("rule:" + amieRule);
            boolean isConditionalKey = isConditionaKey(amieRule);
            //System.out.println("isConditionalKey:"+isConditionalKey + " Thread " + Thread.currentThread().getId() + "\t" + Utilities.formatKey(amieRule));
            if (amieRule.getSupport() >= support && !isConditionalKey) {
                //System.out.println("Case 0" + " Thread " + Thread.currentThread().getId());
                if (!newGraph.graph.containsKey(candidateKey)) {
                    //System.out.println("Case 1" + " Thread " + Thread.currentThread().getId());
                    Node newCandidateKey = candidateKey.clone();
                    HashSet<Node> children = new HashSet<>();
                    newGraph.graph.put(newCandidateKey, children);
                    newGraph.nodes.put(newCandidateKey, newCandidateKey);
                    newCandidateKeys.add(newCandidateKey);
                } else {
                    //System.out.println("Case 2" + " Thread " + Thread.currentThread().getId());
                    HashSet<Node> children = new HashSet<>();
                    newGraph.graph.put(candidateKey, children);
                    newCandidateKeys.add(candidateKey);
                }
            }
            // If the rule is a conditional above the support
            // and there is no a simpler key already discovered
            // then output it
            if (isConditionalKey && amieRule.getSupport() >= support
                    && !isSubsumedByKey(amieRule, conditionRule, conditions2Keys)) {
                // System.out.println("KEY");
                if (!newGraph.graph.containsKey(candidateKey)) {
                    // System.out.println("clone");
                    Node newCandidateKey = candidateKey.clone();
                    synchronized (output) {
                        output.add(amieRule);
                    }
                    //System.out.println(Utilities.formatKey(amieRule) + "\tThread " + Thread.currentThread().getId() + " Case 3");
                    System.out.println(Utilities.formatKey(amieRule));
                    conditions2Keys.put(conditionRule, amieRule);
                    newCandidateKey.toExplore = false;
                    HashSet<Node> children = new HashSet<>();
                    newGraph.graph.put(newCandidateKey, children);
                    newGraph.nodes.put(newCandidateKey, newCandidateKey);
                    newCandidateKeys.add(newCandidateKey);
                } else {
                    synchronized (output) {
                        output.add(amieRule);
                    }
                    System.out.println(Utilities.formatKey(amieRule));
                    //System.out.println(Utilities.formatKey(amieRule) + "\tThread " + Thread.currentThread().getId() + " Case 4");
                    conditions2Keys.put(conditionRule, amieRule);
                    candidateKey.toExplore = false;
                    HashSet<Node> children = new HashSet<>();
                    newGraph.graph.put(candidateKey, children);
                    newGraph.nodes.put(candidateKey, candidateKey);
                    newCandidateKeys.add(candidateKey);
                }
            }
        } else {
            //System.out.println("Case 5");
            newCandidateKeys.add(candidateKey);
        }
    }

    // createChildren
    HashSet<Node> allChildren = new HashSet<>();
    // System.out.println("newCandidateKeys:"+newCandidateKeys);
    for (Node parent1 : newCandidateKeys) {
        // System.out.println("parent1:"+parent1);
        for (Node parent2 : newCandidateKeys) {
            if (parent1 != parent2 && parent1.toExplore != false && parent2.toExplore != false) {
                HashSet<Integer> newSet = new HashSet<>();
                newSet.addAll(parent1.set);
                newSet.addAll(parent2.set);
                HashSet<Integer> condProp_KeyProp = new HashSet<>();
                condProp_KeyProp.addAll(newSet);
                condProp_KeyProp.addAll(getRelations(conditionRule, property2Id));
                // System.out.println("newSet:" + newSet);
                if ((newSet.size() == parent1.set.size() + 1)
                        && (getSupport(newSet, conditionRule, (int) support))
                        && Graph.containsASuperSetOf(nonKeysInt, condProp_KeyProp) != -1) {
                    // System.out.println("enters");
                    Node child = new Node(newSet);
                    if (hasFalseParent(newSet, newCandidateKeys)) {
                        // System.out.println("falseParent");
                        child.toExplore = false;
                    }
                    HashSet<Node> children1 = newGraph.graph.get(parent1);
                    children1.add(child);
                    newGraph.graph.put(parent1, children1);
                    newGraph.nodes.put(child, child);
                    HashSet<Node> grandChildren = new HashSet<>();
                    newGraph.graph.put(child, grandChildren);
                    HashSet<Node> children2 = newGraph.graph.get(parent2);
                    children2.add(child);
                    newGraph.graph.put(parent2, children2);
                    allChildren.add(child);
                }
            }
        }
    }
    if (!allChildren.isEmpty()) {
        discoverConditionalKeysForCondition(newGraph, newGraph, allChildren, conditionRule, output);
    }
}
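The lattice expansion above keeps a merged candidate only when the union of two parents' property sets is exactly one element larger than a parent, which it checks by comparing size() values. A standalone sketch of that check with invented property ids:

import java.util.HashSet;
import java.util.Set;

public class UnionSizeDemo {
    public static void main(String[] args) {
        Set<Integer> parent1 = new HashSet<>();
        parent1.add(1);
        parent1.add(2);
        Set<Integer> parent2 = new HashSet<>();
        parent2.add(1);
        parent2.add(3);

        Set<Integer> merged = new HashSet<>(parent1);
        merged.addAll(parent2);

        // Keep the merged candidate only if it is exactly one property larger than a parent.
        boolean oneStepLarger = merged.size() == parent1.size() + 1;
        System.out.println(merged + " is one step larger than " + parent1 + "? " + oneStepLarger); // true
    }
}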
From source file:cz.cas.lib.proarc.webapp.server.rest.DigitalObjectResource.java
/**
 * Adds new object members. Members that already exist remain untouched.
 *
 * @param parentPid PID of parent object
 * @param toAddPids list of PIDs to add; cannot contain parent PID
 * @return list of added members
 */
@POST
@Path(DigitalObjectResourceApi.MEMBERS_PATH)
@Produces({ MediaType.APPLICATION_JSON })
public SmartGwtResponse<Item> addMembers(
        @FormParam(DigitalObjectResourceApi.MEMBERS_ITEM_PARENT) String parentPid,
        @FormParam(DigitalObjectResourceApi.MEMBERS_ITEM_PID) List<String> toAddPids,
        @FormParam(DigitalObjectResourceApi.MEMBERS_ITEM_BATCHID) Integer batchId)
        throws IOException, FedoraClientException, DigitalObjectException {

    if (parentPid == null) {
        throw RestException.plainNotFound(DigitalObjectResourceApi.MEMBERS_ITEM_PARENT, null);
    }
    if (toAddPids == null || toAddPids.isEmpty()) {
        throw RestException.plainNotFound(DigitalObjectResourceApi.MEMBERS_ITEM_PID, null);
    }
    if (toAddPids.contains(parentPid)) {
        throw RestException.plainText(Status.BAD_REQUEST, "parent and pid are same!");
    }

    HashSet<String> addPidSet = new HashSet<String>(toAddPids);
    if (addPidSet.size() != toAddPids.size()) {
        throw RestException.plainText(Status.BAD_REQUEST, "Duplicate children in the request!");
    }

    // XXX loadLocalSearchItems
    Map<String, Item> memberSearchMap = loadSearchItems(addPidSet);
    DigitalObjectHandler handler = findHandler(parentPid, batchId, false);
    List<Item> added = addMembers(handler, toAddPids, memberSearchMap);
    handler.commit();
    return new SmartGwtResponse<Item>(added);
}
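The duplicate check above relies on a HashSet collapsing repeated PIDs, so a size mismatch between the set and the original list signals duplicates. A minimal sketch of that pattern with made-up identifiers:

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

public class DuplicateCheckDemo {
    public static void main(String[] args) {
        List<String> pids = Arrays.asList("uuid:1", "uuid:2", "uuid:1");
        HashSet<String> pidSet = new HashSet<>(pids);
        // The set drops the repeated "uuid:1", so the sizes differ.
        if (pidSet.size() != pids.size()) {
            System.out.println("Duplicate children in the request!");
        }
    }
}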
From source file:cz.cas.lib.proarc.webapp.server.rest.DigitalObjectResource.java
/**
 * Sets new member sequence of given parent digital object.
 *
 * @param parentPid parent PID
 * @param batchId batch import ID
 * @param toSetPids list of member PIDS
 * @return ordered list of members
 * @throws RestException
 */
@PUT
@Path(DigitalObjectResourceApi.MEMBERS_PATH)
@Consumes({ MediaType.APPLICATION_FORM_URLENCODED })
@Produces({ MediaType.APPLICATION_JSON })
public SmartGwtResponse<Item> setMembers(
        @FormParam(DigitalObjectResourceApi.MEMBERS_ITEM_PARENT) String parentPid,
        @FormParam(DigitalObjectResourceApi.MEMBERS_ITEM_BATCHID) Integer batchId,
        @FormParam(DigitalObjectResourceApi.MEMBERS_ITEM_PID) List<String> toSetPids
        // XXX long timestamp
) throws IOException, FedoraClientException, DigitalObjectException {

    // LOG.log(Level.INFO, "parentPid: {0}, batchId: {1}, toSetPids: {2}",
    //         new Object[]{parentPid, batchId, toSetPids});
    if (batchId == null && parentPid == null) {
        throw RestException.plainNotFound(DigitalObjectResourceApi.MEMBERS_ITEM_PARENT, null);
    }
    boolean batchImportMembers = batchId != null;
    if (toSetPids == null || toSetPids.isEmpty()) {
        throw RestException.plainNotFound(DigitalObjectResourceApi.MEMBERS_ITEM_PID, null);
    }
    if (!batchImportMembers && toSetPids.contains(parentPid)) {
        throw RestException.plainText(Status.BAD_REQUEST, "parent and pid are same!");
    }

    HashSet<String> toSetPidSet = new HashSet<String>(toSetPids);
    if (toSetPidSet.size() != toSetPids.size()) {
        throw RestException.plainText(Status.BAD_REQUEST,
                "duplicates in PIDs to set!\n" + toSetPids.toString());
    }

    Batch batch = batchId == null ? null : importManager.get(batchId);

    // fetch PID[] -> Item[]
    Map<String, Item> memberSearchMap;
    if (batchImportMembers) {
        memberSearchMap = loadLocalSearchItems(batch);
        checkSearchedMembers(toSetPidSet, memberSearchMap);
    } else {
        memberSearchMap = loadSearchItems(toSetPidSet);
    }

    // load current members
    DigitalObjectHandler doHandler = findHandler(parentPid, batch, false);
    RelationEditor editor = doHandler.relations();
    List<String> members = editor.getMembers();
    members.clear();

    // add new members
    ArrayList<Item> added = new ArrayList<Item>();
    for (String addPid : toSetPids) {
        if (!members.contains(addPid)) {
            members.add(addPid);
            Item item = memberSearchMap.get(addPid);
            if (item == null) {
                throw RestException.plainNotFound(DigitalObjectResourceApi.MEMBERS_ITEM_PID,
                        toSetPids.toString());
            }
            item.setParentPid(parentPid);
            added.add(item);
        }
    }
    editor.setMembers(members);
    editor.write(editor.getLastModified(), session.asFedoraLog());
    doHandler.commit();
    return new SmartGwtResponse<Item>(added);
}
From source file:org.apache.axis.wsdl.toJava.JavaStubWriter.java
/**
 * Write the body of the binding's stub file.
 *
 * @param pw
 * @throws IOException
 */
protected void writeFileBody(PrintWriter pw) throws IOException {
    PortType portType = binding.getPortType();
    HashSet types = getTypesInPortType(portType);
    boolean hasMIME = Utils.hasMIME(bEntry);

    if ((types.size() > 0) || hasMIME) {
        pw.println(" private java.util.Vector cachedSerClasses = new java.util.Vector();");
        pw.println(" private java.util.Vector cachedSerQNames = new java.util.Vector();");
        pw.println(" private java.util.Vector cachedSerFactories = new java.util.Vector();");
        pw.println(" private java.util.Vector cachedDeserFactories = new java.util.Vector();");
    }
    pw.println();
    pw.println(" static org.apache.axis.description.OperationDesc [] _operations;");
    pw.println();
    writeOperationMap(pw);
    pw.println();
    pw.println(" public " + className + "() throws org.apache.axis.AxisFault {");
    pw.println(" this(null);");
    pw.println(" }");
    pw.println();
    pw.println(" public " + className
            + "(java.net.URL endpointURL, javax.xml.rpc.Service service) throws org.apache.axis.AxisFault {");
    pw.println(" this(service);");
    pw.println(" super.cachedEndpoint = endpointURL;");
    pw.println(" }");
    pw.println();
    pw.println(" public " + className + "(javax.xml.rpc.Service service) throws org.apache.axis.AxisFault {");
    pw.println(" if (service == null) {");
    pw.println(" super.service = new org.apache.axis.client.Service();");
    pw.println(" } else {");
    pw.println(" super.service = service;");
    pw.println(" }");
    pw.println(" ((org.apache.axis.client.Service)super.service).setTypeMappingVersion(\""
            + emitter.getTypeMappingVersion() + "\");");

    List deferredBindings = new ArrayList();

    // keep track of how many type mappings we write out
    int typeMappingCount = 0;
    if (types.size() > 0) {
        Iterator it = types.iterator();
        while (it.hasNext()) {
            TypeEntry type = (TypeEntry) it.next();
            if (!Utils.shouldEmit(type)) {
                continue;
            }

            // Write out serializer declarations
            if (typeMappingCount == 0) {
                writeSerializationDecls(pw, hasMIME, binding.getQName().getNamespaceURI());
            }

            // write the type mapping for this type
            // writeSerializationInit(pw, type);
            deferredBindings.add(type);

            // increase the number of type mappings count
            typeMappingCount++;
        }
    }

    // Sort the TypeEntry's by their qname.
    Collections.sort(deferredBindings, new Comparator() {
        public int compare(Object a, Object b) {
            TypeEntry type1 = (TypeEntry) a;
            TypeEntry type2 = (TypeEntry) b;
            return type1.getQName().toString().compareToIgnoreCase(type2.getQName().toString());
        }
    });

    // We need to write out the MIME mapping, even if we don't have
    // any type mappings
    if ((typeMappingCount == 0) && hasMIME) {
        writeSerializationDecls(pw, hasMIME, binding.getQName().getNamespaceURI());
        typeMappingCount++;
    }

    // track whether the number of bindings exceeds the threshold
    // that we allow per method.
    boolean needsMultipleBindingMethods = false;
    if (deferredBindings.size() < MAXIMUM_BINDINGS_PER_METHOD) {
        // small number of bindings, just inline them:
        for (Iterator it = deferredBindings.iterator(); it.hasNext();) {
            writeSerializationInit(pw, (TypeEntry) it.next());
        }
    } else {
        needsMultipleBindingMethods = true;
        int methodCount = calculateBindingMethodCount(deferredBindings);

        // invoke each of the soon-to-be generated addBindings methods
        // from the constructor.
        for (int i = 0; i < methodCount; i++) {
            pw.println(" addBindings" + i + "();");
        }
    }
    pw.println(" }");
    pw.println();

    // emit any necessary methods for assembling binding metadata.
    if (needsMultipleBindingMethods) {
        writeBindingMethods(pw, deferredBindings);
        pw.println();
    }

    pw.println(" protected org.apache.axis.client.Call createCall() throws java.rmi.RemoteException {");
    pw.println(" try {");
    pw.println(" org.apache.axis.client.Call _call = super._createCall();");
    pw.println(" if (super.maintainSessionSet) {");
    pw.println(" _call.setMaintainSession(super.maintainSession);");
    pw.println(" }");
    pw.println(" if (super.cachedUsername != null) {");
    pw.println(" _call.setUsername(super.cachedUsername);");
    pw.println(" }");
    pw.println(" if (super.cachedPassword != null) {");
    pw.println(" _call.setPassword(super.cachedPassword);");
    pw.println(" }");
    pw.println(" if (super.cachedEndpoint != null) {");
    pw.println(" _call.setTargetEndpointAddress(super.cachedEndpoint);");
    pw.println(" }");
    pw.println(" if (super.cachedTimeout != null) {");
    pw.println(" _call.setTimeout(super.cachedTimeout);");
    pw.println(" }");
    pw.println(" if (super.cachedPortName != null) {");
    pw.println(" _call.setPortName(super.cachedPortName);");
    pw.println(" }");
    pw.println(" java.util.Enumeration keys = super.cachedProperties.keys();");
    pw.println(" while (keys.hasMoreElements()) {");
    pw.println(" java.lang.String key = (java.lang.String) keys.nextElement();");
    pw.println(" _call.setProperty(key, super.cachedProperties.get(key));");
    pw.println(" }");

    if (typeMappingCount > 0) {
        pw.println(" // " + Messages.getMessage("typeMap00"));
        pw.println(" // " + Messages.getMessage("typeMap01"));
        pw.println(" // " + Messages.getMessage("typeMap02"));
        pw.println(" // " + Messages.getMessage("typeMap03"));
        pw.println(" // " + Messages.getMessage("typeMap04"));
        pw.println(" synchronized (this) {");
        pw.println(" if (firstCall()) {");
        // Hack alert - we need to establish the encoding style before we register type mappings due
        // to the fact that TypeMappings key off of encoding style
        pw.println(" // " + Messages.getMessage("mustSetStyle"));
        if (bEntry.hasLiteral()) {
            pw.println(" _call.setEncodingStyle(null);");
        } else {
            Iterator iterator = bEntry.getBinding().getExtensibilityElements().iterator();
            while (iterator.hasNext()) {
                Object obj = iterator.next();
                if (obj instanceof SOAPBinding) {
                    pw.println(" _call.setSOAPVersion(org.apache.axis.soap.SOAPConstants.SOAP11_CONSTANTS);");
                    pw.println(" _call.setEncodingStyle(org.apache.axis.Constants.URI_SOAP11_ENC);");
                } else if (obj instanceof UnknownExtensibilityElement) {
                    // TODO: After WSDL4J supports soap12, change this code
                    UnknownExtensibilityElement unkElement = (UnknownExtensibilityElement) obj;
                    QName name = unkElement.getElementType();
                    if (name.getNamespaceURI().equals(Constants.URI_WSDL12_SOAP)
                            && name.getLocalPart().equals("binding")) {
                        pw.println(" _call.setSOAPVersion(org.apache.axis.soap.SOAPConstants.SOAP12_CONSTANTS);");
                        pw.println(" _call.setEncodingStyle(org.apache.axis.Constants.URI_SOAP12_ENC);");
                    }
                }
            }
        }
        pw.println(" for (int i = 0; i < cachedSerFactories.size(); ++i) {");
        pw.println(" java.lang.Class cls = (java.lang.Class) cachedSerClasses.get(i);");
        pw.println(" javax.xml.namespace.QName qName =");
        pw.println(" (javax.xml.namespace.QName) cachedSerQNames.get(i);");
        pw.println(" java.lang.Object x = cachedSerFactories.get(i);");
        pw.println(" if (x instanceof Class) {");
        pw.println(" java.lang.Class sf = (java.lang.Class)");
        pw.println(" cachedSerFactories.get(i);");
        pw.println(" java.lang.Class df = (java.lang.Class)");
        pw.println(" cachedDeserFactories.get(i);");
        pw.println(" _call.registerTypeMapping(cls, qName, sf, df, false);");
        pw.println(" }");
        pw.println(" else if (x instanceof javax.xml.rpc.encoding.SerializerFactory) {");
        pw.println(" org.apache.axis.encoding.SerializerFactory sf = (org.apache.axis.encoding.SerializerFactory)");
        pw.println(" cachedSerFactories.get(i);");
        pw.println(" org.apache.axis.encoding.DeserializerFactory df = (org.apache.axis.encoding.DeserializerFactory)");
        pw.println(" cachedDeserFactories.get(i);");
        pw.println(" _call.registerTypeMapping(cls, qName, sf, df, false);");
        pw.println(" }");
        pw.println(" }");
        pw.println(" }");
        pw.println(" }");
    }
    pw.println(" return _call;");
    pw.println(" }");
    pw.println(" catch (java.lang.Throwable _t) {");
    pw.println(" throw new org.apache.axis.AxisFault(\"" + Messages.getMessage("badCall01") + "\", _t);");
    pw.println(" }");
    pw.println(" }");
    pw.println();

    List operations = binding.getBindingOperations();
    for (int i = 0; i < operations.size(); ++i) {
        BindingOperation operation = (BindingOperation) operations.get(i);
        Parameters parameters = bEntry.getParameters(operation.getOperation());

        // Get the soapAction from the <soap:operation>
        String soapAction = "";
        String opStyle = null;
        Iterator operationExtensibilityIterator = operation.getExtensibilityElements().iterator();
        for (; operationExtensibilityIterator.hasNext();) {
            Object obj = operationExtensibilityIterator.next();
            if (obj instanceof SOAPOperation) {
                soapAction = ((SOAPOperation) obj).getSoapActionURI();
                opStyle = ((SOAPOperation) obj).getStyle();
                break;
            } else if (obj instanceof UnknownExtensibilityElement) {
                // TODO: After WSDL4J supports soap12, change this code
                UnknownExtensibilityElement unkElement = (UnknownExtensibilityElement) obj;
                QName name = unkElement.getElementType();
                if (name.getNamespaceURI().equals(Constants.URI_WSDL12_SOAP)
                        && name.getLocalPart().equals("operation")) {
                    if (unkElement.getElement().getAttribute("soapAction") != null) {
                        soapAction = unkElement.getElement().getAttribute("soapAction");
                    }
                    opStyle = unkElement.getElement().getAttribute("style");
                }
            }
        }
        Operation ptOperation = operation.getOperation();
        OperationType type = ptOperation.getStyle();

        // These operation types are not supported. The signature
        // will be a string stating that fact.
        if ((OperationType.NOTIFICATION.equals(type)) || (OperationType.SOLICIT_RESPONSE.equals(type))) {
            pw.println(parameters.signature);
            pw.println();
        } else {
            writeOperation(pw, operation, parameters, soapAction, opStyle, type == OperationType.ONE_WAY, i);
        }
    }
}
From source file:cz.cas.lib.proarc.webapp.server.rest.DigitalObjectResource.java
private List<Item> addMembers(DigitalObjectHandler parent, List<String> toAddPids,
        Map<String, Item> memberSearchMap) throws DigitalObjectException {

    String parentPid = parent.getFedoraObject().getPid();
    HashSet<String> toAddPidSet = new HashSet<String>(toAddPids);
    ArrayList<Item> added = new ArrayList<Item>(toAddPidSet.size());
    if (toAddPidSet.isEmpty()) {
        return added;
    }
    RelationEditor editor = parent.relations();
    List<String> members = editor.getMembers();
    // add new members
    for (String addPid : toAddPids) {
        if (!members.contains(addPid)) {
            members.add(addPid);
            Item item = memberSearchMap.get(addPid);
            if (item == null) {
                throw RestException.plainNotFound("pid", toAddPidSet.toString());
            }
            item.setParentPid(parentPid);
            added.add(item);
        } else {
            throw RestException.plainText(Status.BAD_REQUEST, parentPid + " already contains: " + addPid);
        }
    }
    // write if any change
    if (!added.isEmpty()) {
        editor.setMembers(members);
        editor.write(editor.getLastModified(), session.asFedoraLog());
    }
    return added;
}
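Besides the membership checks, the helper above uses the set's size as the initial capacity of its result list, since at most that many items can be added. A small sketch of the same sizing hint, with the item type reduced to String and made-up PIDs:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

public class CapacityHintDemo {
    public static void main(String[] args) {
        List<String> toAddPids = Arrays.asList("uuid:1", "uuid:2", "uuid:2");
        HashSet<String> toAddPidSet = new HashSet<>(toAddPids);

        // The result can never hold more than the number of distinct PIDs,
        // so the set's size is a reasonable initial capacity.
        List<String> added = new ArrayList<>(toAddPidSet.size());
        for (String pid : toAddPidSet) {
            added.add(pid);
        }
        System.out.println(added.size() + " of " + toAddPids.size() + " PIDs were distinct");
    }
}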
From source file:org.jactr.core.module.declarative.search.local.DefaultSearchSystem.java
protected Collection<IChunk> find(IConditionalSlot conditionalSlot) {
    HashSet<IChunk> rtn = new HashSet<IChunk>();
    switch (conditionalSlot.getCondition()) {
    case IConditionalSlot.EQUALS:
        rtn.addAll(equals(conditionalSlot));
        break;
    case IConditionalSlot.GREATER_THAN:
        rtn.addAll(greaterThan(conditionalSlot));
        break;
    case IConditionalSlot.GREATER_THAN_EQUALS:
        rtn.addAll(greaterThan(conditionalSlot));
        rtn.addAll(equals(conditionalSlot));
        break;
    case IConditionalSlot.LESS_THAN:
        rtn.addAll(lessThan(conditionalSlot));
        break;
    case IConditionalSlot.LESS_THAN_EQUALS:
        rtn.addAll(lessThan(conditionalSlot));
        rtn.addAll(equals(conditionalSlot));
        break;
    case IConditionalSlot.NOT_EQUALS:
        rtn.addAll(not(conditionalSlot));
        break;
    case IConditionalSlot.WITHIN:
    default:
        if (LOGGER.isWarnEnabled())
            LOGGER.warn("No clue what to do with this search condition " + conditionalSlot);
    }
    if (LOGGER.isDebugEnabled())
        LOGGER.debug("Search for " + conditionalSlot + " yielded " + rtn.size() + " results");
    return rtn;
}
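The search method above accumulates matches from several sub-queries into one HashSet, so size() reports the number of distinct chunks after the unions. A stripped-down sketch of that accumulate-then-count shape; the match lists are hard-coded stand-ins for the real sub-queries:

import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;

public class AccumulateResultsDemo {
    public static void main(String[] args) {
        Collection<String> greaterThan = Arrays.asList("chunk-b", "chunk-c");
        Collection<String> equalTo = Arrays.asList("chunk-a", "chunk-b");

        HashSet<String> rtn = new HashSet<>();
        rtn.addAll(greaterThan);   // GREATER_THAN_EQUALS merges two conditions
        rtn.addAll(equalTo);

        // size() reflects distinct matches, not the sum of the two lists
        System.out.println("Search yielded " + rtn.size() + " results"); // 3
    }
}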
From source file:org.apache.hadoop.chukwa.analysis.salsa.visualization.Heatmap.java
/**
 * Interfaces with database to get data and
 * populate data structures for rendering
 */
public HeatmapData getData() {
    // preliminary setup
    OfflineTimeHandler time_offline;
    TimeHandler time_online;
    long start, end, min, max;

    if (offline_use) {
        time_offline = new OfflineTimeHandler(param_map, this.timezone);
        start = time_offline.getStartTime();
        end = time_offline.getEndTime();
    } else {
        time_online = new TimeHandler(this.request, this.timezone);
        start = time_online.getStartTime();
        end = time_online.getEndTime();
    }

    DatabaseWriter dbw = new DatabaseWriter(this.cluster);

    // setup query
    String query;
    if (this.query_state != null && this.query_state.equals("read")) {
        query = "select block_id,start_time,finish_time,start_time_millis,finish_time_millis,status,state_name,hostname,other_host,bytes from ["
                + table
                + "] where finish_time between '[start]' and '[end]' and (state_name like 'read_local' or state_name like 'read_remote')";
    } else if (this.query_state != null && this.query_state.equals("write")) {
        query = "select block_id,start_time,finish_time,start_time_millis,finish_time_millis,status,state_name,hostname,other_host,bytes from ["
                + table
                + "] where finish_time between '[start]' and '[end]' and (state_name like 'write_local' or state_name like 'write_remote' or state_name like 'write_replicated')";
    } else {
        query = "select block_id,start_time,finish_time,start_time_millis,finish_time_millis,status,state_name,hostname,other_host,bytes from ["
                + table + "] where finish_time between '[start]' and '[end]' and state_name like '"
                + query_state + "'";
    }
    Macro mp = new Macro(start, end, query);
    query = mp.toString() + " order by start_time";

    ArrayList<HashMap<String, Object>> events = new ArrayList<HashMap<String, Object>>();

    ResultSet rs = null;

    log.debug("Query: " + query);
    // run query, extract results
    try {
        rs = dbw.query(query);
        ResultSetMetaData rmeta = rs.getMetaData();
        int col = rmeta.getColumnCount();
        while (rs.next()) {
            HashMap<String, Object> event = new HashMap<String, Object>();
            long event_time = 0;
            for (int i = 1; i <= col; i++) {
                if (rmeta.getColumnType(i) == java.sql.Types.TIMESTAMP) {
                    event.put(rmeta.getColumnName(i), rs.getTimestamp(i).getTime());
                } else {
                    event.put(rmeta.getColumnName(i), rs.getString(i));
                }
            }
            events.add(event);
        }
    } catch (SQLException ex) {
        // handle any errors
        log.error("SQLException: " + ex.getMessage());
        log.error("SQLState: " + ex.getSQLState());
        log.error("VendorError: " + ex.getErrorCode());
    } finally {
        dbw.close();
    }

    SimpleDateFormat format = new SimpleDateFormat("MMM dd yyyy HH:mm:ss");

    log.info(events.size() + " results returned.");

    HashSet<String> host_set = new HashSet<String>();
    HashMap<String, Integer> host_indices = new HashMap<String, Integer>();
    HashMap<Integer, String> host_rev_indices = new HashMap<Integer, String>();

    // collect hosts, name unique hosts
    for (int i = 0; i < events.size(); i++) {
        HashMap<String, Object> event = events.get(i);
        String curr_host = (String) event.get("hostname");
        String other_host = (String) event.get("other_host");
        host_set.add(curr_host);
        host_set.add(other_host);
    }
    int num_hosts = host_set.size();
    Iterator<String> host_iter = host_set.iterator();
    for (int i = 0; i < num_hosts && host_iter.hasNext(); i++) {
        String curr_host = host_iter.next();
        host_indices.put(curr_host, new Integer(i));
        host_rev_indices.put(new Integer(i), curr_host);
    }
    System.out.println("Number of hosts: " + num_hosts);
    long stats[][] = new long[num_hosts][num_hosts];
    long count[][] = new long[num_hosts][num_hosts]; // used for averaging

    int start_millis = 0, end_millis = 0;

    // deliberate design choice to duplicate code PER possible operation
    // otherwise we have to do the mode check N times, for N states returned
    //
    // compute aggregate statistics
    log.info("Query statistic type: " + this.query_stat_type);
    if (this.query_stat_type.equals("transaction_count")) {
        for (int i = 0; i < events.size(); i++) {
            HashMap<String, Object> event = events.get(i);
            start = (Long) event.get("start_time");
            end = (Long) event.get("finish_time");
            start_millis = Integer.parseInt(((String) event.get("start_time_millis")));
            end_millis = Integer.parseInt(((String) event.get("finish_time_millis")));
            String cell = (String) event.get("state_name");
            String this_host = (String) event.get("hostname");
            String other_host = (String) event.get("other_host");
            int this_host_idx = host_indices.get(this_host).intValue();
            int other_host_idx = host_indices.get(other_host).intValue();
            // to, from
            stats[other_host_idx][this_host_idx] += 1;
        }
    } else if (this.query_stat_type.equals("avg_duration")) {
        for (int i = 0; i < events.size(); i++) {
            HashMap<String, Object> event = events.get(i);
            start = (Long) event.get("start_time");
            end = (Long) event.get("finish_time");
            start_millis = Integer.parseInt(((String) event.get("start_time_millis")));
            end_millis = Integer.parseInt(((String) event.get("finish_time_millis")));
            String cell = (String) event.get("state_name");
            String this_host = (String) event.get("hostname");
            String other_host = (String) event.get("other_host");
            int this_host_idx = host_indices.get(this_host).intValue();
            int other_host_idx = host_indices.get(other_host).intValue();
            long curr_val = end_millis - start_millis + ((end - start) * 1000);
            // to, from
            stats[other_host_idx][this_host_idx] += curr_val;
            count[other_host_idx][this_host_idx] += 1;
        }
        for (int i = 0; i < num_hosts; i++) {
            for (int j = 0; j < num_hosts; j++) {
                if (count[i][j] > 0)
                    stats[i][j] = stats[i][j] / count[i][j];
            }
        }
    } else if (this.query_stat_type.equals("avg_volume")) {
        for (int i = 0; i < events.size(); i++) {
            HashMap<String, Object> event = events.get(i);
            start = (Long) event.get("start_time");
            end = (Long) event.get("finish_time");
            start_millis = Integer.parseInt(((String) event.get("start_time_millis")));
            end_millis = Integer.parseInt(((String) event.get("finish_time_millis")));
            String cell = (String) event.get("state_name");
            String this_host = (String) event.get("hostname");
            String other_host = (String) event.get("other_host");
            int this_host_idx = host_indices.get(this_host).intValue();
            int other_host_idx = host_indices.get(other_host).intValue();
            long curr_val = Long.parseLong((String) event.get("bytes"));
            // to, from
            stats[other_host_idx][this_host_idx] += curr_val;
            count[other_host_idx][this_host_idx] += 1;
        }
        for (int i = 0; i < num_hosts; i++) {
            for (int j = 0; j < num_hosts; j++) {
                if (count[i][j] > 0)
                    stats[i][j] = stats[i][j] / count[i][j];
            }
        }
    } else if (this.query_stat_type.equals("total_duration")) {
        for (int i = 0; i < events.size(); i++) {
            HashMap<String, Object> event = events.get(i);
            start = (Long) event.get("start_time");
            end = (Long) event.get("finish_time");
            start_millis = Integer.parseInt(((String) event.get("start_time_millis")));
            end_millis = Integer.parseInt(((String) event.get("finish_time_millis")));
            String cell = (String) event.get("state_name");
            String this_host = (String) event.get("hostname");
            String other_host = (String) event.get("other_host");
            int this_host_idx = host_indices.get(this_host).intValue();
            int other_host_idx = host_indices.get(other_host).intValue();
            double curr_val = end_millis - start_millis + ((end - start) * 1000);
            // to, from
            stats[other_host_idx][this_host_idx] += curr_val;
        }
    } else if (this.query_stat_type.equals("total_volume")) {
        for (int i = 0; i < events.size(); i++) {
            HashMap<String, Object> event = events.get(i);
            start = (Long) event.get("start_time");
            end = (Long) event.get("finish_time");
            start_millis = Integer.parseInt(((String) event.get("start_time_millis")));
            end_millis = Integer.parseInt(((String) event.get("finish_time_millis")));
            String cell = (String) event.get("state_name");
            String this_host = (String) event.get("hostname");
            String other_host = (String) event.get("other_host");
            int this_host_idx = host_indices.get(this_host).intValue();
            int other_host_idx = host_indices.get(other_host).intValue();
            long curr_val = Long.parseLong((String) event.get("bytes"));
            // to, from
            stats[other_host_idx][this_host_idx] += curr_val;
        }
    }

    int[] permute = null;
    if (sort_nodes) {
        permute = hClust(stats);
        stats = doPermute(stats, permute);
    }

    Table agg_tab = new Table();
    agg_tab.addColumn("stat", long.class);
    min = Long.MAX_VALUE;
    max = Long.MIN_VALUE;
    agg_tab.addRows(num_hosts * num_hosts);

    // row-wise placement (row1, followed by row2, etc.)
    for (int i = 0; i < num_hosts; i++) {
        for (int j = 0; j < num_hosts; j++) {
            agg_tab.setLong((i * num_hosts) + j, "stat", stats[i][j]);
            if (stats[i][j] > max)
                max = stats[i][j];
            if (stats[i][j] > 0 && stats[i][j] < min)
                min = stats[i][j];
        }
    }
    if (min == Long.MAX_VALUE)
        min = 0;

    log.info(agg_tab);

    // collate data
    HeatmapData hd = new HeatmapData();
    hd.stats = new long[num_hosts][num_hosts];
    hd.stats = stats;
    hd.min = min;
    hd.max = max;
    hd.num_hosts = num_hosts;
    hd.agg_tab = agg_tab;

    this.add_info_extra = new String("\nState: " + this.prettyStateNames.get(this.query_state) + " ("
            + events.size() + " " + this.query_state + "'s [" + this.query_stat_type + "])\n"
            + "Plotted value range: [" + hd.min + "," + hd.max + "] (Zeros in black)");

    hd.hostnames = new String[num_hosts];
    for (int i = 0; i < num_hosts; i++) {
        // When sort_nodes is false, permute is null, so fall back to the identity index here;
        // the original code dereferenced permute unconditionally (the two branches were identical).
        String curr_host = sort_nodes ? host_rev_indices.get(new Integer(permute[i]))
                : host_rev_indices.get(new Integer(i));
        hd.hostnames[i] = new String(curr_host);
    }
    return hd;
}
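The heatmap code above first deduplicates host names in a HashSet, then uses host_set.size() both to assign each host an index and to dimension the square statistics matrix. A condensed sketch of that indexing step with invented host names:

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class HostMatrixDemo {
    public static void main(String[] args) {
        Set<String> hostSet = new HashSet<>();
        hostSet.add("node1");
        hostSet.add("node2");
        hostSet.add("node1"); // duplicate event source, collapsed by the set

        int numHosts = hostSet.size();
        Map<String, Integer> hostIndices = new HashMap<>();
        int idx = 0;
        for (String host : hostSet) {
            hostIndices.put(host, idx++); // one row/column per unique host
        }
        long[][] stats = new long[numHosts][numHosts];
        System.out.println("matrix is " + stats.length + "x" + stats[0].length);
    }
}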