Example usage for java.util.TreeMap.values()

List of usage examples for java.util.TreeMap.values()

Introduction

This page presents usage examples for java.util.TreeMap.values().

Prototype

public Collection<V> values() 

Document

Returns a Collection view of the values contained in this map.
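
Before the full examples, here is a minimal, self-contained sketch of values() in isolation. The map contents are hypothetical and not drawn from any of the sources below.

import java.util.Collection;
import java.util.TreeMap;

public class TreeMapValuesDemo {
    public static void main(String[] args) {
        TreeMap<String, Integer> map = new TreeMap<String, Integer>();
        map.put("b", 2);
        map.put("a", 1);
        map.put("c", 3);

        // values() returns the values in ascending key order: [1, 2, 3]
        Collection<Integer> values = map.values();
        System.out.println(values);

        // The collection is a live view backed by the map: removing a
        // value here removes the corresponding entry from the map.
        values.remove(Integer.valueOf(2));
        System.out.println(map); // {a=1, c=3}
    }
}

The returned collection does not support add() or addAll(); because it is a view, changes to the map are visible through it and vice versa.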

Usage

From source file:com.impetus.ankush2.framework.monitor.AbstractMonitor.java

/**
 * Builds the memory heat map data.
 */
public void memoryheatmap() {
    // cluster nodes
    Set<Node> nodes = dbCluster.getNodes(); // dbCluster.getSortedNodesByIp();
    // list of states which need to be included when creating the heat map
    // data.
    List<String> includeStates = new ArrayList<String>();
    // adding deployed state for node
    includeStates.add(com.impetus.ankush2.constant.Constant.Node.State.DEPLOYED.toString());
    // adding removing state for node
    includeStates.add(com.impetus.ankush2.constant.Constant.Node.State.REMOVING.toString());
    // heat map data object.
    TreeMap heatMapData = new TreeMap();
    // iterating over the nodes
    for (Node node : nodes) {

        // If the node state is not in the include list, skip this node.
        if (!includeStates.contains(node.getState())) {
            continue;
        }

        // getting node monitoring data.
        NodeMonitoring nodeMonitoring = new MonitoringManager().getMonitoringData(node.getId());

        String usageValue = null;
        // if node monitoring, its monitoring info, and its memory info are
        // not null
        if (nodeMonitoring != null && nodeMonitoring.getMonitoringInfo() != null
                && nodeMonitoring.getMonitoringInfo().getMemoryInfos() != null) {
            // get usage value.
            Double usageValueDouble = nodeMonitoring.getMonitoringInfo().getMemoryInfos().get(0)
                    .getUsedPercentage();
            // current usage value.
            if (usageValueDouble != null) {
                usageValue = formator.format(usageValueDouble).toString();
            }
        }

        // Getting the status value for the memory usage
        DBEventManager eventManager = new DBEventManager();
        Event event = eventManager.getEvent(null, node.getPublicIp(), null,
                com.impetus.ankush2.constant.Constant.Component.Name.AGENT, Constant.Alerts.Metric.MEMORY,
                null);

        // Getting the severity value.
        String status = Event.Severity.NORMAL.toString();
        if (event != null) {
            status = event.getSeverity().toString();
        }

        // If the agent is down, mark the status as unavailable.
        if (DBServiceManager.getManager().isAgentDown(node.getPublicIp())) {
            usageValue = "0";
            status = Constant.Alerts.Severity.UNAVAILABLE;
        }

        // Getting rack info for node.
        String rackId = getRackId(node);
        // update the rack heat map data and put it in main heat map data.
        heatMapData.put(rackId, updateRackHeatMapData(rackId, node, usageValue, status, heatMapData));
    }
    // setting rack info in map.
    result.put(com.impetus.ankush2.constant.Constant.Keys.RACKINFO, heatMapData.values());
    // setting total rack.
    result.put(com.impetus.ankush2.constant.Constant.Keys.TOTALRACKS, heatMapData.size());
}

From source file:com.impetus.ankush2.framework.monitor.AbstractMonitor.java

/**
 * Builds the CPU heat map data.
 */
public void cpuheatmap() {
    // cluster nodes
    Set<Node> nodes = dbCluster.getNodes();// getSortedNodesByIp();
    // list of states which need to be included when creating the heat map
    // data.
    List<String> includeStates = new ArrayList<String>();
    // adding deployed state for node
    includeStates.add(com.impetus.ankush2.constant.Constant.Node.State.DEPLOYED.toString());
    // adding removing state for node
    includeStates.add(com.impetus.ankush2.constant.Constant.Node.State.REMOVING.toString());
    // heat map data object.
    TreeMap heatMapData = new TreeMap();
    // iterating over the nodes.
    for (Node node : nodes) {

        // If the node state is not in the include list, skip this node.
        if (!includeStates.contains(node.getState())) {
            continue;
        }
        // node monitoring object.
        NodeMonitoring nodeMonitoring = new MonitoringManager().getMonitoringData(node.getId());

        // usage value.
        String usageValue = null;
        // if node monitoring, its monitoring info, and its uptime info are
        // not null
        if (nodeMonitoring != null && nodeMonitoring.getMonitoringInfo() != null
                && nodeMonitoring.getMonitoringInfo().getUptimeInfos() != null) {
            // get usage value.
            Double usageValueDouble = nodeMonitoring.getMonitoringInfo().getUptimeInfos().get(0).getCpuUsage();
            // current usage value.
            if (usageValueDouble != null) {
                usageValue = formator.format(usageValueDouble).toString();
            }
        }

        // Getting the status value for the CPU Usage
        DBEventManager eventManager = new DBEventManager();
        // Getting the event for the node.
        Event event = eventManager.getEvent(null, node.getPublicIp(), null,
                com.impetus.ankush2.constant.Constant.Component.Name.AGENT, Constant.Alerts.Metric.CPU, null);

        // Getting the severity value.
        String status = Event.Severity.NORMAL.toString();
        if (event != null) {
            status = event.getSeverity().toString();
        }

        // If the agent is down, mark the status as unavailable.
        if (DBServiceManager.getManager().isAgentDown(node.getPublicIp())) {
            usageValue = "0";
            status = Constant.Alerts.Severity.UNAVAILABLE;
        }
        // Getting rack info for node.
        String rackId = getRackId(node);
        // update the rack heat map data and put it in main heat map data.
        heatMapData.put(rackId, updateRackHeatMapData(rackId, node, usageValue, status, heatMapData));
    }
    // setting rack info in map.
    result.put(com.impetus.ankush2.constant.Constant.Keys.RACKINFO, heatMapData.values());
    // setting total rack.
    result.put(com.impetus.ankush2.constant.Constant.Keys.TOTALRACKS, heatMapData.size());
}

From source file:org.apache.hadoop.mapred.TaskTracker.java

/**
 * Close down the TaskTracker and all its components.  We must also shutdown
 * any running tasks or threads, and cleanup disk space.  A new TaskTracker
 * within the same process space might be restarted, so everything must be
 * clean.
 * @throws InterruptedException 
 */
public synchronized void close() throws IOException, InterruptedException {
    //
    // Kill running tasks.  Do this in a separate map, called 'tasksToClose',
    // because calling jobHasFinished() may result in an edit to 'tasks'.
    //
    TreeMap<TaskAttemptID, TaskInProgress> tasksToClose = new TreeMap<TaskAttemptID, TaskInProgress>();
    tasksToClose.putAll(tasks);
    for (TaskInProgress tip : tasksToClose.values()) {
        tip.jobHasFinished(false);
    }

    this.running = false;

    // Clear local storage
    cleanupStorage();

    // Shutdown the fetcher thread
    this.mapEventsFetcher.interrupt();

    //stop the launchers
    this.mapLauncher.interrupt();
    this.reduceLauncher.interrupt();

    this.distributedCacheManager.stopCleanupThread();
    jvmManager.stop();

    // shutdown RPC connections
    RPC.stopProxy(jobClient);

    // wait for the fetcher thread to exit
    for (boolean done = false; !done;) {
        try {
            this.mapEventsFetcher.join();
            done = true;
        } catch (InterruptedException e) {
            // ignore the interrupt and retry the join
        }
    }

    if (taskReportServer != null) {
        taskReportServer.stop();
        taskReportServer = null;
    }
    if (healthChecker != null) {
        //stop node health checker service
        healthChecker.stop();
        healthChecker = null;
    }
    if (jettyBugMonitor != null) {
        jettyBugMonitor.shutdown();
        jettyBugMonitor = null;
    }
}

From source file:org.jasig.ssp.web.api.reports.PersonHistoryReportController.java

public static List<StudentHistoryTO> sort(final Set<EarlyAlertTO> earlyAlerts,
        final Map<String, List<TaskTO>> taskMap, final List<JournalEntryTO> journalEntries) {

    // TreeMap keeps the entries sorted by modified date (descending, via the comparator)
    final TreeMap<Date, StudentHistoryTO> studentHistoryMap = new TreeMap<Date, StudentHistoryTO>(
            new Comparator<Date>() {
                public int compare(Date o1, Date o2) {
                    return sortDateDescending(o1, o2);
                }
            });

    //Sort early alerts by modified date descending
    final List<EarlyAlertTO> earlyAlertsSorted = new ArrayList<EarlyAlertTO>(earlyAlerts);
    Collections.sort(earlyAlertsSorted, new Comparator<EarlyAlertTO>() {
        @Override
        public int compare(final EarlyAlertTO o1, final EarlyAlertTO o2) {
            return sortDateDescending(o1.getModifiedDate(), o2.getModifiedDate());
        }
    });

    //Sort journal entries by modified date descending
    final List<JournalEntryTO> journalEntriesSorted = journalEntries;
    Collections.sort(journalEntriesSorted, new Comparator<JournalEntryTO>() {
        @Override
        public int compare(final JournalEntryTO o1, final JournalEntryTO o2) {
            return sortDateDescending(o1.getModifiedDate(), o2.getModifiedDate());
        }
    });

    //First, iterate over each EarlyAlertTO, looking for matching dates in the PersonHistoryTO
    final Iterator<EarlyAlertTO> alertIter = earlyAlertsSorted.iterator();
    while (alertIter.hasNext()) {
        final EarlyAlertTO thisEarlyAlertTO = alertIter.next();
        final Date snewDate = DateTimeUtils.midnightOn(thisEarlyAlertTO.getModifiedDate());

        if (studentHistoryMap.containsKey(snewDate)) {
            final StudentHistoryTO studentHistoryTO = studentHistoryMap.get(snewDate);
            studentHistoryTO.addEarlyAlertTO(thisEarlyAlertTO);

        } else {
            final StudentHistoryTO thisStudentHistoryTO = new StudentHistoryTO(
                    getDateFormatter().format(snewDate));
            thisStudentHistoryTO.addEarlyAlertTO(thisEarlyAlertTO);
            studentHistoryMap.put(snewDate, thisStudentHistoryTO);
        }
    }

    //Second, iterate over each JournalEntryTO, looking for matching dates in the PersonHistoryTO
    final Iterator<JournalEntryTO> journalEntryIter = journalEntriesSorted.iterator();
    while (journalEntryIter.hasNext()) {
        final JournalEntryTO thisJournalEntryTO = journalEntryIter.next();
        final Date snewDate = DateTimeUtils.midnightOn(thisJournalEntryTO.getModifiedDate());

        if (studentHistoryMap.containsKey(snewDate)) {
            final StudentHistoryTO studentHistoryTO = studentHistoryMap.get(snewDate);
            studentHistoryTO.addJournalEntryTO(thisJournalEntryTO);
        } else {
            final StudentHistoryTO thisStudentHistoryTO = new StudentHistoryTO(
                    getDateFormatter().format(snewDate));
            thisStudentHistoryTO.addJournalEntryTO(thisJournalEntryTO);
            studentHistoryMap.put(snewDate, thisStudentHistoryTO);
        }
    }

    // Per the API, the tasks are already broken down into a map, sorted by group.
    //    We want to maintain this grouping, but sort these based on date.
    //Third, iterate over each TaskTO in each group, looking for matching dates in the PersonHistoryTO
    for (final Map.Entry<String, List<TaskTO>> entry : taskMap.entrySet()) {
        final String groupName = entry.getKey();
        final List<TaskTO> tasksSorted = entry.getValue();

        //Sort tasks by modified date descending
        Collections.sort(tasksSorted, new Comparator<TaskTO>() {
            @Override
            public int compare(final TaskTO o1, final TaskTO o2) {
                return sortDateDescending(o1.getModifiedDate(), o2.getModifiedDate());
            }
        });

        final Iterator<TaskTO> taskIter = tasksSorted.iterator();
        while (taskIter.hasNext()) {
            final TaskTO thisTask = taskIter.next();
            final Date snewDate = DateTimeUtils.midnightOn(thisTask.getModifiedDate());

            if (studentHistoryMap.containsKey(snewDate)) {
                final StudentHistoryTO studentHistoryTO = studentHistoryMap.get(snewDate);
                studentHistoryTO.addTask(groupName, thisTask);
            } else {
                final StudentHistoryTO thisStudentHistoryTO = new StudentHistoryTO(
                        getDateFormatter().format(snewDate));
                thisStudentHistoryTO.addTask(groupName, thisTask);
                studentHistoryMap.put(snewDate, thisStudentHistoryTO);
            }
        }
    }

    // at this point, we should have a StudentHistoryTO map keyed by date
    final Collection<StudentHistoryTO> studentHistoryTOs = studentHistoryMap.values();

    final List<StudentHistoryTO> retVal = new ArrayList<StudentHistoryTO>();
    final Iterator<StudentHistoryTO> studentHistoryTOIter = studentHistoryTOs.iterator();
    while (studentHistoryTOIter.hasNext()) {
        final StudentHistoryTO currentStudentHistoryTO = studentHistoryTOIter.next();
        currentStudentHistoryTO.createTaskList();
        retVal.add(currentStudentHistoryTO);
    }

    return retVal;
}

From source file:org.osaf.cosmo.calendar.EntityConverter.java

protected Calendar getCalendarFromEventStamp(EventStamp stamp) {
    Calendar masterCal = CalendarUtils.copyCalendar(stamp.getEventCalendar());
    if (masterCal == null)
        return null;

    // the master calendar might not have any events; for
    // instance, a client might be trying to save a VTODO
    if (masterCal.getComponents(Component.VEVENT).isEmpty())
        return masterCal;

    VEvent masterEvent = (VEvent) masterCal.getComponents(Component.VEVENT).get(0);
    VAlarm masterAlarm = getDisplayAlarm(masterEvent);
    String masterLocation = stamp.getLocation();

    // build timezone map that includes all timezones in master calendar
    ComponentList timezones = masterCal.getComponents(Component.VTIMEZONE);
    HashMap<String, VTimeZone> tzMap = new HashMap<String, VTimeZone>();
    for (Iterator it = timezones.iterator(); it.hasNext();) {
        VTimeZone vtz = (VTimeZone) it.next();
        tzMap.put(vtz.getTimeZoneId().getValue(), vtz);
    }

    // check start/end date tz is included, and add if it isn't
    String tzid = getTzId(stamp.getStartDate());
    if (tzid != null && !tzMap.containsKey(tzid)) {
        TimeZone tz = TIMEZONE_REGISTRY.getTimeZone(tzid);
        if (tz != null) {
            VTimeZone vtz = tz.getVTimeZone();
            masterCal.getComponents().add(0, vtz);
            tzMap.put(tzid, vtz);
        }
    }

    tzid = getTzId(stamp.getEndDate());
    if (tzid != null && !tzMap.containsKey(tzid)) {
        TimeZone tz = TIMEZONE_REGISTRY.getTimeZone(tzid);
        if (tz != null) {
            VTimeZone vtz = tz.getVTimeZone();
            masterCal.getComponents().add(0, vtz);
            tzMap.put(tzid, vtz);
        }
    }

    // merge item properties to icalendar props
    mergeCalendarProperties(masterEvent, (NoteItem) stamp.getItem());

    // bug 9606: handle displayAlarm with no trigger by not including
    // in exported icalendar
    if (masterAlarm != null) {
        if (stamp.getDisplayAlarmTrigger() == null) {
            masterEvent.getAlarms().remove(masterAlarm);
            masterAlarm = null;
        }
    }

    // If event is not recurring, skip all the event modification
    // processing
    if (!stamp.isRecurring())
        return masterCal;

    // add all exception events
    NoteItem note = (NoteItem) stamp.getItem();
    TreeMap<String, VEvent> sortedMap = new TreeMap<String, VEvent>();
    for (NoteItem exception : note.getModifications()) {
        EventExceptionStamp exceptionStamp = HibEventExceptionStamp.getStamp(exception);

        // if modification isn't stamped as an event then ignore
        if (exceptionStamp == null)
            continue;

        // Get exception event copy
        VEvent exceptionEvent = (VEvent) CalendarUtils.copyComponent(exceptionStamp.getExceptionEvent());

        // ensure DURATION or DTEND exists on the modification
        if (ICalendarUtils.getDuration(exceptionEvent) == null) {
            ICalendarUtils.setDuration(exceptionEvent, ICalendarUtils.getDuration(masterEvent));
        }

        // merge item properties to icalendar props
        mergeCalendarProperties(exceptionEvent, exception);

        // check for inherited anyTime
        if (exceptionStamp.isAnyTime() == null) {
            DtStart modDtStart = exceptionEvent.getStartDate();
            // remove "missing" value
            modDtStart.getParameters().remove(modDtStart.getParameter(ICalendarConstants.PARAM_X_OSAF_ANYTIME));
            // add inherited value
            if (stamp.isAnyTime()) {
                modDtStart.getParameters().add(getAnyTimeXParam());
            }
        }

        // Check for inherited displayAlarm, which is represented
        // by a valarm with no TRIGGER
        VAlarm displayAlarm = getDisplayAlarm(exceptionEvent);
        if (displayAlarm != null && exceptionStamp.getDisplayAlarmTrigger() == null) {
            exceptionEvent.getAlarms().remove(displayAlarm);
            if (masterAlarm != null)
                exceptionEvent.getAlarms().add(masterAlarm);
        }

        // Check for inherited LOCATION which is represented as null LOCATION
        // If inherited, and master event has a LOCATION, then add it to exception
        if (exceptionStamp.getLocation() == null && masterLocation != null) {
            ICalendarUtils.setLocation(masterLocation, exceptionEvent);
        }

        sortedMap.put(exceptionStamp.getRecurrenceId().toString(), exceptionEvent);

        // verify that timezones are present for exceptions, and add if not
        tzid = getTzId(exceptionStamp.getStartDate());
        if (tzid != null && !tzMap.containsKey(tzid)) {
            TimeZone tz = TIMEZONE_REGISTRY.getTimeZone(tzid);
            if (tz != null) {
                VTimeZone vtz = tz.getVTimeZone();
                masterCal.getComponents().add(0, vtz);
                tzMap.put(tzid, vtz);
            }
        }

        tzid = getTzId(exceptionStamp.getEndDate());
        if (tzid != null && !tzMap.containsKey(tzid)) {
            TimeZone tz = TIMEZONE_REGISTRY.getTimeZone(tzid);
            if (tz != null) {
                VTimeZone vtz = tz.getVTimeZone();
                masterCal.getComponents().add(0, vtz);
                tzMap.put(tzid, vtz);
            }
        }
    }

    masterCal.getComponents().addAll(sortedMap.values());

    return masterCal;
}

From source file:com.joliciel.talismane.parser.TransitionBasedGlobalLearningParser.java

public List<ParseConfiguration> parseSentence(List<PosTagSequence> posTagSequences,
        FeatureWeightVector weightVector, RankingSolution correctSolution) {
    MONITOR.startTask("parseSentence");
    try {
        long startTime = (new Date()).getTime();
        int maxAnalysisTimeMilliseconds = maxAnalysisTimePerSentence * 1000;
        int minFreeMemoryBytes = minFreeMemory * KILOBYTE;

        TokenSequence tokenSequence = posTagSequences.get(0).getTokenSequence();

        TreeMap<Integer, TreeSet<ParseConfiguration>> heaps = new TreeMap<Integer, TreeSet<ParseConfiguration>>();

        TreeSet<ParseConfiguration> heap0 = new TreeSet<ParseConfiguration>();
        for (PosTagSequence posTagSequence : posTagSequences) {
            // add an initial ParseConfiguration for each postag sequence
            ParseConfiguration initialConfiguration = this.getParserServiceInternal()
                    .getInitialConfiguration(posTagSequence);
            initialConfiguration.setScoringStrategy(new SimpleRankingScoringStrategy());
            initialConfiguration.setRankingScore(0.0);
            heap0.add(initialConfiguration);
            if (LOG.isDebugEnabled()) {
                LOG.debug("Adding initial posTagSequence: " + posTagSequence);
            }
        }
        heaps.put(0, heap0);
        TreeSet<ParseConfiguration> backupHeap = null;

        TreeSet<ParseConfiguration> finalHeap = null;
        while (heaps.size() > 0) {
            Entry<Integer, TreeSet<ParseConfiguration>> heapEntry = heaps.firstEntry();
            TreeSet<ParseConfiguration> currentHeap = heapEntry.getValue();
            int currentHeapIndex = heapEntry.getKey();
            if (LOG.isTraceEnabled()) {
                LOG.trace("##### Polling next heap: " + heapEntry.getKey() + ", size: "
                        + heapEntry.getValue().size());
            }

            boolean finished = false;
            // systematically set the final heap here, just in case we exit "naturally" with no more heaps
            finalHeap = heapEntry.getValue();
            backupHeap = new TreeSet<ParseConfiguration>();

            // we jump out when either (a) all tokens have been attached or (b) we go over the max allotted time
            ParseConfiguration topConf = currentHeap.first();
            if (topConf.isTerminal()) {
                LOG.trace("Exiting with terminal heap: " + heapEntry.getKey() + ", size: "
                        + heapEntry.getValue().size());
                finished = true;
            }

            // check if we've gone over the allotted time for this sentence
            long analysisTime = (new Date()).getTime() - startTime;
            if (maxAnalysisTimePerSentence > 0 && analysisTime > maxAnalysisTimeMilliseconds) {
                LOG.info("Parse tree analysis took too long for sentence: " + tokenSequence.getText());
                LOG.info("Breaking out after " + maxAnalysisTimePerSentence + " seconds.");
                finished = true;
            }

            // check if we have enough memory to process this sentence
            if (minFreeMemory > 0) {
                long freeMemory = Runtime.getRuntime().freeMemory();
                if (freeMemory < minFreeMemoryBytes) {
                    LOG.info("Not enough memory left to parse sentence: " + tokenSequence.getText());
                    LOG.info("Min free memory (bytes):" + minFreeMemoryBytes);
                    LOG.info("Current free memory (bytes): " + freeMemory);
                    finished = true;
                }
            }

            // check if any of the remaining top-N solutions on any heap can lead to the correct solution
            if (correctSolution != null) {
                boolean canReachCorrectSolution = false;
                for (TreeSet<ParseConfiguration> heap : heaps.values()) {
                    int j = 1;
                    for (ParseConfiguration solution : heap) {
                        if (j > beamWidth)
                            break;
                        if (solution.canReach(correctSolution)) {
                            canReachCorrectSolution = true;
                            break;
                        }
                        j++;
                    }
                    if (canReachCorrectSolution)
                        break;
                }
                if (!canReachCorrectSolution) {
                    LOG.debug("None of the solutions on the heap can reach the gold solution. Exiting.");
                    finished = true;
                }
            }

            if (finished) {
                // combine any remaining heaps
                for (TreeSet<ParseConfiguration> heap : heaps.values()) {
                    if (finalHeap != heap) {
                        finalHeap.addAll(heap);
                    }
                }
                break;
            }

            // remove heap from set of heaps
            heapEntry = heaps.pollFirstEntry();

            // limit the breadth to K
            int maxSolutions = currentHeap.size() > this.beamWidth ? this.beamWidth : currentHeap.size();

            int j = 0;
            while (currentHeap.size() > 0) {
                ParseConfiguration history = currentHeap.pollFirst();
                backupHeap.add(history);
                if (LOG.isTraceEnabled()) {
                    LOG.trace("### Next configuration on heap " + heapEntry.getKey() + ":");
                    LOG.trace(history.toString());
                    LOG.trace("Score: " + df.format(history.getScore()));
                    LOG.trace(history.getPosTagSequence());
                }

                Set<Transition> transitions = new HashSet<Transition>();

                // test the positive rules on the current configuration
                boolean ruleApplied = false;
                if (parserPositiveRules != null) {
                    MONITOR.startTask("check rules");
                    try {
                        for (ParserRule rule : parserPositiveRules) {
                            if (LOG.isTraceEnabled()) {
                                LOG.trace("Checking rule: " + rule.getCondition().getName());
                            }
                            RuntimeEnvironment env = this.featureService.getRuntimeEnvironment();
                            FeatureResult<Boolean> ruleResult = rule.getCondition().check(history, env);
                            if (ruleResult != null && ruleResult.getOutcome()) {
                                transitions.add(rule.getTransition());
                                ruleApplied = true;
                                if (LOG.isTraceEnabled()) {
                                    LOG.trace("Rule applies. Setting transition to: "
                                            + rule.getTransition().getCode());
                                }

                                if (!rule.getTransition().checkPreconditions(history)) {
                                    LOG.error("Cannot apply rule, preconditions not met.");
                                    ruleApplied = false;
                                }
                                break;
                            }
                        }
                    } finally {
                        MONITOR.endTask("check rules");
                    }
                }

                if (!ruleApplied) {
                    transitions = parsingConstrainer.getPossibleTransitions(history);

                    Set<Transition> eliminatedTransitions = new HashSet<Transition>();
                    for (Transition transition : transitions) {
                        if (!transition.checkPreconditions(history)) {
                            eliminatedTransitions.add(transition);
                        }
                    }
                    transitions.removeAll(eliminatedTransitions);

                    // apply the negative rules
                    eliminatedTransitions = new HashSet<Transition>();
                    if (parserNegativeRules != null) {
                        MONITOR.startTask("check negative rules");
                        try {
                            for (ParserRule rule : parserNegativeRules) {
                                if (LOG.isTraceEnabled()) {
                                    LOG.trace("Checking negative rule: " + rule.getCondition().getName());
                                }
                                RuntimeEnvironment env = this.featureService.getRuntimeEnvironment();
                                FeatureResult<Boolean> ruleResult = rule.getCondition().check(history, env);
                                if (ruleResult != null && ruleResult.getOutcome()) {
                                    eliminatedTransitions.add(rule.getTransition());
                                    if (LOG.isTraceEnabled()) {
                                        LOG.debug("Rule applies. Eliminating transition: "
                                                + rule.getTransition().getCode());
                                    }
                                }
                            }

                            if (eliminatedTransitions.size() == transitions.size()) {
                                LOG.debug("All transitions eliminated! Restoring original transitions.");
                            } else {
                                transitions.removeAll(eliminatedTransitions);
                            }
                        } finally {
                            MONITOR.endTask("check negative rules");
                        }
                    }
                } // has a positive rule been applied?

                if (transitions.size() == 0) {
                    // just in case we run out of both heaps and analyses, we build this backup heap
                    backupHeap.add(history);
                    if (LOG.isTraceEnabled())
                        LOG.trace(
                                "No transitions could be applied: not counting this solution as part of the beam");
                } else {
                    // up the counter, since we will count this solution towards the heap
                    j++;
                    // add solutions to the heap, one per valid transition
                    MONITOR.startTask("heap sort");
                    try {
                        Map<Transition, Double> deltaScorePerTransition = new HashMap<Transition, Double>();
                        double absoluteMax = 1;

                        for (Transition transition : transitions) {
                            if (LOG.isTraceEnabled()) {
                                LOG.trace("Applying transition: " + transition.getCode());
                            }
                            ParseConfiguration configuration = this.parserServiceInternal
                                    .getConfiguration(history);
                            transition.apply(configuration);
                            configuration.setRankingScore(history.getRankingScore());
                            configuration.getIncrementalFeatureResults()
                                    .addAll(history.getIncrementalFeatureResults());

                            // test the features on the new configuration
                            double scoreDelta = 0.0;
                            MONITOR.startTask("feature analyse");
                            List<FeatureResult<?>> featureResults = new ArrayList<FeatureResult<?>>();
                            try {
                                for (ParseConfigurationFeature<?> feature : this.parseFeatures) {
                                    MONITOR.startTask(feature.getName());
                                    try {
                                        RuntimeEnvironment env = this.featureService.getRuntimeEnvironment();
                                        FeatureResult<?> featureResult = feature.check(configuration, env);
                                        if (featureResult != null) {
                                            featureResults.add(featureResult);
                                            double weight = weightVector.getWeight(featureResult);
                                            scoreDelta += weight;
                                            if (LOG.isTraceEnabled()) {
                                                LOG.trace(featureResult.toString() + " = " + weight);
                                            }
                                        }
                                    } finally {
                                        MONITOR.endTask(feature.getName());
                                    }
                                }
                                configuration.getIncrementalFeatureResults().add(featureResults);
                                if (LOG.isTraceEnabled()) {
                                    LOG.trace("Score = " + configuration.getRankingScore() + " + " + scoreDelta
                                            + " = " + (configuration.getRankingScore() + scoreDelta));
                                }
                                configuration.setRankingScore(configuration.getRankingScore() + scoreDelta);
                                deltaScorePerTransition.put(transition, scoreDelta);
                                if (Math.abs(scoreDelta) > absoluteMax)
                                    absoluteMax = Math.abs(scoreDelta);

                            } finally {
                                MONITOR.endTask("feature analyse");
                            }

                            int nextHeapIndex = parseComparisonStrategy.getComparisonIndex(configuration)
                                    * 1000;
                            while (nextHeapIndex <= currentHeapIndex)
                                nextHeapIndex++;

                            TreeSet<ParseConfiguration> nextHeap = heaps.get(nextHeapIndex);
                            if (nextHeap == null) {
                                nextHeap = new TreeSet<ParseConfiguration>();
                                heaps.put(nextHeapIndex, nextHeap);
                                if (LOG.isTraceEnabled())
                                    LOG.trace("Created heap with index: " + nextHeapIndex);
                            }
                            nextHeap.add(configuration);
                            if (LOG.isTraceEnabled()) {
                                LOG.trace("Added configuration with score " + configuration.getScore()
                                        + " to heap: " + nextHeapIndex + ", total size: " + nextHeap.size());
                            }

                            configuration.clearMemory();
                        } // next transition

                        // Create a probability distribution of transitions
                        // normalise probabilities for each transition via normalised exponential
                        // e^(x/absmax)/sum(e^(x/absmax))
                        // where x/absmax is in [-1,1]
                        // e^(x/absmax) is in [1/e,e]

                        double total = 0.0;
                        for (Transition transition : deltaScorePerTransition.keySet()) {
                            double deltaScore = deltaScorePerTransition.get(transition);
                            deltaScore = Math.exp(deltaScore / absoluteMax);
                            deltaScorePerTransition.put(transition, deltaScore);
                            total += deltaScore;
                        }

                        for (Transition transition : deltaScorePerTransition.keySet()) {
                            double probability = deltaScorePerTransition.get(transition);
                            probability /= total;
                            Decision<Transition> decision = machineLearningService.createDecision(transition,
                                    probability);
                            transition.setDecision(decision);
                            if (LOG.isTraceEnabled()) {
                                LOG.trace("Transition: " + transition.getCode() + ", Prob: " + probability);
                            }
                        }

                    } finally {
                        MONITOR.endTask("heap sort");
                    }
                } // have we any transitions?

                // beam width test
                if (j == maxSolutions)
                    break;
            } // next history   
        } // next atomic index

        // return the best sequences on the heap
        List<ParseConfiguration> bestConfigurations = new ArrayList<ParseConfiguration>();
        int i = 0;

        if (finalHeap.isEmpty())
            finalHeap = backupHeap;

        while (!finalHeap.isEmpty()) {
            bestConfigurations.add(finalHeap.pollFirst());
            i++;
            if (i >= this.getBeamWidth())
                break;
        }
        if (LOG.isDebugEnabled()) {
            if (correctSolution != null) {
                LOG.debug("Gold transitions: " + correctSolution.getIncrementalOutcomes());
            }
            for (ParseConfiguration finalConfiguration : bestConfigurations) {
                LOG.debug(df.format(finalConfiguration.getScore()) + ": " + finalConfiguration.toString());
                LOG.debug("Pos tag sequence: " + finalConfiguration.getPosTagSequence());
                LOG.debug("Transitions: " + finalConfiguration.getTransitions());
                if (LOG.isTraceEnabled()) {
                    StringBuilder sb = new StringBuilder();
                    sb.append(" * PosTag sequence score ");
                    sb.append(df.format(finalConfiguration.getPosTagSequence().getScore()));
                    sb.append(" = ");
                    for (PosTaggedToken posTaggedToken : finalConfiguration.getPosTagSequence()) {
                        sb.append(" * ");
                        sb.append(df.format(posTaggedToken.getDecision().getProbability()));
                    }
                    sb.append(" root ");
                    sb.append(finalConfiguration.getPosTagSequence().size());
                    LOG.trace(sb.toString());

                    sb = new StringBuilder();
                    sb.append(" * Token sequence score = ");
                    sb.append(df.format(finalConfiguration.getPosTagSequence().getTokenSequence().getScore()));
                    LOG.trace(sb.toString());

                }
            }
        }
        return bestConfigurations;
    } finally {
        MONITOR.endTask("parseSentence");
    }
}

From source file:org.unitedinternet.cosmo.model.hibernate.EntityConverter.java

/**
 * Gets the calendar from an event stamp.
 * @param stamp The event stamp.
 * @return The calendar.
 */
protected Calendar getCalendarFromEventStamp(EventStamp stamp) {
    Calendar masterCal = CalendarUtils.copyCalendar(stamp.getEventCalendar());
    if (masterCal == null) {
        return null;
    }

    // the master calendar might not have any events; for
    // instance, a client might be trying to save a VTODO
    if (masterCal.getComponents(Component.VEVENT).isEmpty()) {
        return masterCal;
    }

    VEvent masterEvent = (VEvent) masterCal.getComponents(Component.VEVENT).get(0);
    VAlarm masterAlarm = getDisplayAlarm(masterEvent);
    String masterLocation = stamp.getLocation();

    // build timezone map that includes all timezones in master calendar
    ComponentList<VTimeZone> timezones = masterCal.getComponents(Component.VTIMEZONE);
    HashMap<String, VTimeZone> tzMap = new HashMap<String, VTimeZone>();
    for (VTimeZone vtz : timezones) {
        tzMap.put(vtz.getTimeZoneId().getValue(), vtz);
    }

    // check start/end date tz is included, and add if it isn't
    String tzid = getTzId(stamp.getStartDate());
    if (tzid != null && !tzMap.containsKey(tzid)) {
        TimeZone tz = TIMEZONE_REGISTRY.getTimeZone(tzid);
        if (tz != null) {
            VTimeZone vtz = tz.getVTimeZone();
            masterCal.getComponents().add(0, vtz);
            tzMap.put(tzid, vtz);
        }
    }

    tzid = getTzId(stamp.getEndDate());
    if (tzid != null && !tzMap.containsKey(tzid)) {
        TimeZone tz = TIMEZONE_REGISTRY.getTimeZone(tzid);
        if (tz != null) {
            VTimeZone vtz = tz.getVTimeZone();
            masterCal.getComponents().add(0, vtz);
            tzMap.put(tzid, vtz);
        }
    }

    // merge item properties to icalendar props
    mergeCalendarProperties(masterEvent, (NoteItem) stamp.getItem());

    // bug 9606: handle displayAlarm with no trigger by not including
    // in exported icalendar
    if (masterAlarm != null && stamp.getDisplayAlarmTrigger() == null) {
        masterEvent.getAlarms().remove(masterAlarm);
        masterAlarm = null;
    }

    // If event is not recurring, skip all the event modification
    // processing
    if (!stamp.isRecurring()) {
        return masterCal;
    }

    // add all exception events
    NoteItem note = (NoteItem) stamp.getItem();
    TreeMap<String, VEvent> sortedMap = new TreeMap<String, VEvent>();
    for (NoteItem exception : note.getModifications()) {
        EventExceptionStamp exceptionStamp = HibEventExceptionStamp.getStamp(exception);

        // if modification isn't stamped as an event then ignore
        if (exceptionStamp == null) {
            continue;
        }

        // Get exception event copy
        VEvent exceptionEvent = (VEvent) CalendarUtils.copyComponent(exceptionStamp.getExceptionEvent());

        // ensure DURATION or DTEND exists on the modification
        if (ICalendarUtils.getDuration(exceptionEvent) == null) {
            ICalendarUtils.setDuration(exceptionEvent, ICalendarUtils.getDuration(masterEvent));
        }

        // merge item properties to icalendar props
        mergeCalendarProperties(exceptionEvent, exception);

        // check for inherited anyTime
        if (exceptionStamp.isAnyTime() == null) {
            DtStart modDtStart = exceptionEvent.getStartDate();
            // remove "missing" value
            modDtStart.getParameters().remove(modDtStart.getParameter(ICalendarConstants.PARAM_X_OSAF_ANYTIME));
            // add inherited value
            if (stamp.isAnyTime()) {
                modDtStart.getParameters().add(getAnyTimeXParam());
            }
        }

        // Check for inherited displayAlarm, which is represented
        // by a valarm with no TRIGGER
        VAlarm displayAlarm = getDisplayAlarm(exceptionEvent);
        if (displayAlarm != null && exceptionStamp.getDisplayAlarmTrigger() == null) {
            exceptionEvent.getAlarms().remove(displayAlarm);
            if (masterAlarm != null) {
                exceptionEvent.getAlarms().add(masterAlarm);
            }
        }

        // Check for inherited LOCATION which is represented as null LOCATION
        // If inherited, and master event has a LOCATION, then add it to exception
        if (exceptionStamp.getLocation() == null && masterLocation != null) {
            ICalendarUtils.setLocation(masterLocation, exceptionEvent);
        }

        sortedMap.put(exceptionStamp.getRecurrenceId().toString(), exceptionEvent);

        // verify that timezones are present for exceptions, and add if not
        tzid = getTzId(exceptionStamp.getStartDate());
        if (tzid != null && !tzMap.containsKey(tzid)) {
            TimeZone tz = TIMEZONE_REGISTRY.getTimeZone(tzid);
            if (tz != null) {
                VTimeZone vtz = tz.getVTimeZone();
                masterCal.getComponents().add(0, vtz);
                tzMap.put(tzid, vtz);
            }
        }

        tzid = getTzId(exceptionStamp.getEndDate());
        if (tzid != null && !tzMap.containsKey(tzid)) {
            TimeZone tz = TIMEZONE_REGISTRY.getTimeZone(tzid);
            if (tz != null) {
                VTimeZone vtz = tz.getVTimeZone();
                masterCal.getComponents().add(0, vtz);
                tzMap.put(tzid, vtz);
            }
        }
    }

    masterCal.getComponents().addAll(sortedMap.values());

    return masterCal;
}

From source file:com.tesora.dve.sql.parser.TranslatorUtils.java

public void assignPositions() {
    if (pc.getCapability() == Capability.PARSING_ONLY)
        return;
    if (!parameters.isEmpty()) {
        TreeMap<SourceLocation, Parameter> map = new TreeMap<SourceLocation, Parameter>();
        for (Parameter p : parameters)
            map.put(p.getSourceLocation(), p);
        if (map.size() != parameters.size())
            throw new SchemaException(Pass.SECOND, "Lost parameters while doing position assignment");
        int i = 0;
        for (Parameter p : map.values()) {
            p.setPosition(i);
            pc.getValueManager().registerParameter(pc, p);
            i++;
        }
    }
    if (literals.size() > KnownVariables.CACHED_PLAN_LITERALS_MAX
            .getValue(pc.getConnection().getVariableSource()).intValue()) {
        forceUncacheable(ValueManager.CacheStatus.NOCACHE_TOO_MANY_LITERALS);
    } else {
        TreeMap<SourceLocation, DelegatingLiteralExpression> map = new TreeMap<SourceLocation, DelegatingLiteralExpression>();
        for (Pair<DelegatingLiteralExpression, Object> p : literals) {
            map.put(p.getFirst().getSourceLocation(), p.getFirst());
        }
        if (map.size() != literals.size())
            throw new SchemaException(Pass.SECOND, "Lost literals while doing position assignment");
        int i = 0;
        for (DelegatingLiteralExpression dle : map.values()) {
            pc.getValueManager().addLiteralValue(pc, i, literals.get(dle.getPosition()).getSecond(), dle);
            dle.setPosition(i, true);
            i++;
        }
    }
}
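
A minimal sketch of the reordering idiom used above: keys that carry the original source order, with values() read back to assign contiguous positions. The offsets and names here are illustrative, not DVE's SourceLocation or Parameter types.

import java.util.TreeMap;

public class PositionAssignmentDemo {
    public static void main(String[] args) {
        // Hypothetical character offsets standing in for SourceLocation keys.
        TreeMap<Integer, String> byOffset = new TreeMap<Integer, String>();
        byOffset.put(42, "paramB");
        byOffset.put(7, "paramA");
        byOffset.put(130, "paramC");

        // values() iterates in ascending key order, so positions follow
        // the order in which the parameters appeared in the source text.
        int i = 0;
        for (String p : byOffset.values()) {
            System.out.println("position " + i++ + " -> " + p);
        }
    }
}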

From source file:org.biomart.configurator.controller.MartController.java

/**
 * @param fksToBeDropped
 * @param dmd
 * @param schema
 * @param catalog
 * @param stepSize
 * @throws SQLException
 * @throws DataModelException
 */
public void synchroniseKeysUsingDMD(final SourceSchema ss, final Collection<ForeignKey> fksToBeDropped,
        final DatabaseMetaData dmd, final String schema, final String catalog)
        throws SQLException, DataModelException {
    Log.debug("Running DMD key synchronisation");
    // Loop through all the tables in the database, which is the same
    // as looping through all the primary keys.
    Log.debug("Finding tables");
    for (final Iterator<Table> i = ss.getTables().iterator(); i.hasNext();) {

        // Obtain the table and its primary key.
        final SourceTable pkTable = (SourceTable) i.next();
        final PrimaryKey pk = pkTable.getPrimaryKey();
        // Skip all tables which have no primary key.
        if (pk == null)
            continue;

        Log.debug("Processing primary key " + pk);

        // Make a list of relations that already exist in this schema,
        // from some previous run. Any relations that are left in this
        // list by the end of the loop for this table no longer exist in
        // the database, and will be dropped.
        // TreeSet keeps the relations in a predictable order.
        final Collection<Relation> relationsToBeDropped = new TreeSet<Relation>(pk.getRelations());

        // Identify all foreign keys in the database metadata that refer
        // to the current primary key.
        Log.debug("Finding referring foreign keys");
        String searchCatalog = catalog;
        String searchSchema = schema;
        final ResultSet dbTblFKCols = dmd.getExportedKeys(searchCatalog, searchSchema, pkTable.getName());

        // Loop through the results. There will be one result row per
        // column per key, so we need to build up a set of key columns
        // in a map.
        // The map keys represent the column position within a key. Each
        // map value is a list of columns. In essence the map is a 2-D
        // representation of the foreign keys which refer to this PK,
        // with the keys of the map (Y-axis) representing the column
        // position in the FK, and the values of the map (X-axis)
        // representing each individual FK. In all cases, FK columns are
        // assumed to be in the same order as the PK columns. The map is
        // sorted by key column position.
        // An assumption is made that the query will return columns from
        // the FK in the same order as all other FKs, ie. all column 1s
        // will be returned before any 2s, and then all 2s will be
        // returned
        // in the same order as the 1s they are associated with, etc.
        final TreeMap<Short, List<Column>> dbFKs = new TreeMap<Short, List<Column>>();
        while (dbTblFKCols.next()) {
            final String fkTblName = dbTblFKCols.getString("FKTABLE_NAME");
            final String fkColName = dbTblFKCols.getString("FKCOLUMN_NAME");
            final Short fkColSeq = new Short(dbTblFKCols.getShort("KEY_SEQ"));
            if (fkTblName != null && fkTblName.contains("$")) { // exclude ORACLE's temporary tables (unlikely to be
                                                                // found here though)
                continue;
            }

            // Note the column.
            if (!dbFKs.containsKey(fkColSeq))
                dbFKs.put(fkColSeq, new ArrayList<Column>());
            // In some dbs, FKs can be invalid, so we need to check
            // them.
            final Table fkTbl = ss.getTableByName(fkTblName);
            if (fkTbl != null) {
                final Column fkCol = (Column) fkTbl.getColumnByName(fkColName);
                if (fkCol != null)
                    (dbFKs.get(fkColSeq)).add(fkCol);
            }
        }
        dbTblFKCols.close();

        // Sort foreign keys by name (case insensitive)
        for (List<Column> columnList : dbFKs.values()) {
            Collections.sort(columnList);
        }

        // Only construct FKs if we actually found any.
        if (!dbFKs.isEmpty()) {
            // Identify the sequence of the first column, which may be 0
            // or 1, depending on database implementation.
            final int firstColSeq = ((Short) dbFKs.firstKey()).intValue();

            // How many columns are in the PK?
            final int pkColCount = pkTable.getPrimaryKey().getColumns().size();

            // How many FKs do we have?
            final int fkCount = dbFKs.get(dbFKs.firstKey()).size();

            // Loop through the FKs, and construct each one at a time.
            for (int j = 0; j < fkCount; j++) {
                // Set up an array to hold the FK columns.
                final List<Column> candidateFKColumns = new ArrayList<Column>();

                // For each FK column name, look up the actual column in
                // the table.
                for (final Iterator<Map.Entry<Short, List<Column>>> k = dbFKs.entrySet().iterator(); k
                        .hasNext();) {
                    final Map.Entry<Short, List<Column>> entry = k.next();
                    final Short keySeq = (Short) entry.getKey();
                    // Convert the db-specific column index to a
                    // 0-indexed figure for the array of fk columns.
                    final int fkColSeq = keySeq.intValue() - firstColSeq;
                    candidateFKColumns.add((Column) (entry.getValue()).get(j));
                }

                // Create a template foreign key based around the set
                // of candidate columns we found.
                ForeignKey fkObject;
                try {
                    List<Column> columns = new ArrayList<Column>();
                    for (int k = 0; k < candidateFKColumns.size(); k++) {
                        columns.add(candidateFKColumns.get(k));
                    }
                    fkObject = new ForeignKey(columns);
                    // new KeyController(fkObject);
                } catch (final Throwable t) {
                    throw new BioMartError(t);
                }
                final Table fkTable = fkObject.getTable();

                // If any FK already exists on the target table with the
                // same columns in the same order, then reuse it.
                boolean fkAlreadyExists = false;
                for (final Iterator<ForeignKey> f = fkTable.getForeignKeys().iterator(); f.hasNext()
                        && !fkAlreadyExists;) {
                    final ForeignKey candidateFK = f.next();
                    if (candidateFK.equals(fkObject)) {
                        // Found one. Reuse it!
                        fkObject = candidateFK;
                        // Update the status to indicate that the FK is
                        // backed by the database, if previously it was
                        // handmade.
                        if (fkObject.getStatus().equals(ComponentStatus.HANDMADE))
                            fkObject.setStatus(ComponentStatus.INFERRED);
                        // Remove the FK from the list to be dropped
                        // later, as it definitely exists now.
                        fksToBeDropped.remove(candidateFK);
                        // Flag the key as existing.
                        fkAlreadyExists = true;
                    }
                }

                // Has the key been reused, or is it a new one?
                if (!fkAlreadyExists)
                    try {
                        fkTable.getForeignKeys().add(fkObject);
                        // fkTable.getForeignKeys().add(fk);
                    } catch (final Throwable t) {
                        throw new BioMartError(t);
                    }

                // Work out whether the relation from the FK to
                // the PK should be 1:M or 1:1. The rule is that
                // it will be 1:M in all cases except where the
                // FK table has a PK with identical columns to
                // the FK, in which case it is 1:1, as the FK
                // is unique.
                Cardinality card = Cardinality.MANY_A;
                final PrimaryKey fkPK = fkTable.getPrimaryKey();
                if (fkPK != null && fkObject.getColumns().equals(fkPK.getColumns()))
                    card = Cardinality.ONE;

                // Check to see if it already has a relation.
                boolean relationExists = false;
                for (final Iterator<Relation> f = fkObject.getRelations().iterator(); f.hasNext();) {
                    // Obtain the next relation.
                    final Relation candidateRel = f.next();

                    // a) a relation already exists between the FK
                    // and the PK.
                    if (candidateRel.getOtherKey(fkObject).equals(pk)) {
                        // If cardinality matches, make it
                        // inferred. If doesn't match, make it
                        // modified and update original cardinality.
                        try {
                            if (card.equals(candidateRel.getCardinality())) {
                                if (!candidateRel.getStatus().equals(ComponentStatus.INFERRED_INCORRECT))
                                    candidateRel.setStatus(ComponentStatus.INFERRED);
                            } else {
                                if (!candidateRel.getStatus().equals(ComponentStatus.INFERRED_INCORRECT))
                                    candidateRel.setStatus(ComponentStatus.MODIFIED);
                                candidateRel.setOriginalCardinality(card);
                            }
                        } catch (final AssociationException ae) {
                            throw new BioMartError(ae);
                        }
                        // Don't drop it at the end of the loop.
                        relationsToBeDropped.remove(candidateRel);
                        // Say we've found it.
                        relationExists = true;
                    }

                    // b) a handmade relation exists elsewhere which
                    // should not be dropped. All other relations
                    // elsewhere will be dropped.
                    else if (candidateRel.getStatus().equals(ComponentStatus.HANDMADE))
                        // Don't drop it at the end of the loop.
                        relationsToBeDropped.remove(candidateRel);
                }

                // If relation did not already exist, create it.
                if (!relationExists && !pk.equals(fkObject)) {
                    // Establish the relation.
                    try {
                        new RelationSource(pk, fkObject, card);
                        // pk.getObject().addRelation(relation);
                        // fk.getObject().addRelation(relation);
                    } catch (final Throwable t) {
                        throw new BioMartError(t);
                    }
                }
            }
        }

        // Remove any relations that we didn't find in the database (but
        // leave the handmade ones behind).
        for (final Iterator<Relation> j = relationsToBeDropped.iterator(); j.hasNext();) {
            final Relation r = j.next();
            if (r.getStatus().equals(ComponentStatus.HANDMADE))
                continue;
            r.getFirstKey().removeRelation(r);
            r.getSecondKey().removeRelation(r);
        }
    }
}

From source file:de.tudarmstadt.ukp.uby.integration.alignment.xml.transform.sensealignments.VnFnSenseAlignmentXml.java

/**
 * @param metadata
 * @throws IOException
 */
@Override
public void toAlignmentXml(XmlMeta metadata) throws IOException {

    Lexicon vn = uby.getLexiconByName(lexiconName);
    TreeMap<String, Source> sourceMap = new TreeMap<>();

    int noSource = 0;
    int lines = 0;
    int count = 0;
    ArrayList<String> output = new ArrayList<String>();
    try {
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        DocumentBuilder builder = factory.newDocumentBuilder();
        Document doc = builder.parse(new File(alignmentFile));
        doc.getDocumentElement().normalize();
        NodeList entries = doc.getElementsByTagName("vncls");
        for (int i = 0; i < entries.getLength(); i++) {
            Node alignment = entries.item(i);
            NamedNodeMap atts = alignment.getAttributes();
            String vnClass = atts.getNamedItem("class").getTextContent();
            String vnLemma = atts.getNamedItem("vnmember").getTextContent();
            String luId = atts.getNamedItem("fnlexent").getTextContent();
            // there are mappings with empty (fn) target:
            if (luId.equals("")) {
                noSource++;
            } else {
                // add output here
                output.add(luId + "\t" + vnLemma + "\t" + vnClass + "\n");

                List<LexicalEntry> vnentries = uby.getLexicalEntries(vnLemma, EPartOfSpeech.verb, vn);
                if (vnentries.size() > 0) {
                    for (LexicalEntry e : vnentries) {
                        List<Sense> vnSenses = e.getSenses();
                        for (Sense vns : vnSenses) {
                            String senseId = vns.getId();
                            // filter by VN-class
                            List<SemanticLabel> labels = uby.getSemanticLabelsbySenseIdbyType(senseId,
                                    ELabelTypeSemantics.verbnetClass.toString());
                            for (SemanticLabel l : labels) {
                                String[] labelItems = l.getLabel().split("-");
                                StringBuffer parsedLabel = new StringBuffer();
                                parsedLabel.append(labelItems[1]);
                                for (int ji = 2; ji < labelItems.length; ji++) {
                                    parsedLabel.append("-" + labelItems[ji]);
                                }
                                if (parsedLabel.toString().equals(vnClass)) {
                                    // get or create the Source for this lexical unit id
                                    Source source = null;
                                    if (sourceMap.containsKey(luId)) {
                                        source = sourceMap.get(luId);
                                    } else {
                                        source = new Source();
                                        source.ref = luId;
                                    }

                                    Target target = new Target();
                                    target.ref = vns.getMonolingualExternalRefs().iterator().next()
                                            .getExternalReference();
                                    target.decision = new Decision();
                                    target.decision.value = true;
                                    target.decision.confidence = DEFAULTCONFIDENCE;

                                    // add target to source
                                    source.targets.add(target);
                                    count++;
                                    sourceMap.put(source.ref, source);
                                }
                            }
                        }
                    }
                }
            }
            lines++;
        }
    } catch (IOException | ParserConfigurationException | SAXException e) {
        throw new IOException(e);
    }
    logString.append("Converted " + alignmentFile + ", statistics:" + LF);
    logString.append("\tInput Lines: " + lines + LF);
    logString.append("\tOutput: " + output.size() + LF);
    logString.append("\tNo alignment target: " + noSource + LF);
    logString.append("\tControl: output +  no alignment = input lines: " + (output.size() + noSource) + LF);
    logString.append("\tNumber of alignment pairs in output:" + count);
    logger.info(logString.toString());

    writer.writeMetaData(metadata);
    Alignments alignments = new Alignments();
    alignments.source = new LinkedList<>();
    alignments.source.addAll(sourceMap.values());
    writer.writeAlignments(alignments);
    writer.close();
}