attempt to reduce the overhead imposed by database access
authorAndreas Kohlbecker <a.kohlbecker@bgbm.org>
Fri, 4 Sep 2015 06:07:31 +0000 (08:07 +0200)
committerAndreas Kohlbecker <a.kohlbecker@bgbm.org>
Wed, 29 Jun 2016 13:57:07 +0000 (15:57 +0200)
cdmlib-persistence/src/main/java/eu/etaxonomy/cdm/persistence/dao/hibernate/taxon/ClassificationDaoHibernateImpl.java
cdmlib-persistence/src/main/java/eu/etaxonomy/cdm/persistence/dao/taxon/IClassificationDao.java
cdmlib-persistence/src/main/java/eu/etaxonomy/cdm/persistence/dto/ClassificationLookupDTO.java [new file with mode: 0644]
cdmlib-persistence/src/test/java/eu/etaxonomy/cdm/persistence/dao/hibernate/taxon/ClassificationDaoHibernateImplTest.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/IService.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/ServiceBase.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/description/TransmissionEngineDistribution.java

index d47191a8d1a7897910e0d0146605dd4a36c50285..2ef63957e4e8d59aaaecfe85e98fced9645ae707 100644 (file)
@@ -27,6 +27,7 @@ import eu.etaxonomy.cdm.model.taxon.TaxonNode;
 import eu.etaxonomy.cdm.persistence.dao.hibernate.common.IdentifiableDaoBase;\r
 import eu.etaxonomy.cdm.persistence.dao.taxon.IClassificationDao;\r
 import eu.etaxonomy.cdm.persistence.dao.taxon.ITaxonNodeDao;\r
+import eu.etaxonomy.cdm.persistence.dto.ClassificationLookupDTO;\r
 \r
 /**\r
  * @author a.mueller\r
@@ -254,6 +255,33 @@ public class ClassificationDaoHibernateImpl extends IdentifiableDaoBase<Classifi
         return persistentObject.getUuid();\r
     }\r
 \r
+    @Override\r
+    public ClassificationLookupDTO classificationLookup(Classification classification) {\r
+\r
+        ClassificationLookupDTO classificationLookupDTO = new ClassificationLookupDTO(classification);\r
+\r
+        // only for debugging:\r
+//        logger.setLevel(Level.TRACE);\r
+//        Logger.getLogger("org.hibernate.SQL").setLevel(Level.DEBUG);\r
+\r
+        String hql = "select t.id, n.rank, tp.id from TaxonNode as tn join tn.classification as c join tn.taxon as t join t.name as n "\r
+                + " left join tn.parent as tnp left join tnp.taxon as tp "\r
+                + " where c = :classification";\r
+        Query query = getSession().createQuery(hql);\r
+        query.setParameter("classification", classification);\r
+        @SuppressWarnings("unchecked")\r
+        List<Object[]> result = query.list();\r
+        for(Object[] row : result) {\r
+            Integer parentId = null;\r
+//            if(row.length == 3) { // TODO check necessary?\r
+//                parentId = (Integer) row[2];\r
+//            }\r
+            classificationLookupDTO.add((Integer)row[0], (Rank)row[1], parentId);\r
+        }\r
+\r
+        return classificationLookupDTO ;\r
+    }\r
+\r
 \r
 \r
 \r
index b26c886d9eb95b2a31a52ffb0fc4253bf69bbb22..1c1fec1d9a1b1c2b07436678be0169d5dcec0dce 100644 (file)
@@ -16,6 +16,7 @@ import eu.etaxonomy.cdm.model.taxon.Classification;
 import eu.etaxonomy.cdm.model.taxon.Taxon;\r
 import eu.etaxonomy.cdm.model.taxon.TaxonNode;\r
 import eu.etaxonomy.cdm.persistence.dao.common.IIdentifiableDao;\r
+import eu.etaxonomy.cdm.persistence.dto.ClassificationLookupDTO;\r
 \r
 /**\r
  * @author a.mueller\r
@@ -63,7 +64,9 @@ public interface IClassificationDao extends IIdentifiableDao<Classification> {
     public List<TaxonNode> listChildrenOf(Taxon taxon, Classification classification, Integer pageSize, Integer pageIndex, List<String> propertyPaths);\r
 \r
 \r
-    public abstract Long countChildrenOf(Taxon taxon, Classification classification);\r
+    public Long countChildrenOf(Taxon taxon, Classification classification);\r
+\r
+    public ClassificationLookupDTO classificationLookup(Classification classification);\r
 \r
     /**\r
      * @param taxon\r
diff --git a/cdmlib-persistence/src/main/java/eu/etaxonomy/cdm/persistence/dto/ClassificationLookupDTO.java b/cdmlib-persistence/src/main/java/eu/etaxonomy/cdm/persistence/dto/ClassificationLookupDTO.java
new file mode 100644 (file)
index 0000000..83884e3
--- /dev/null
@@ -0,0 +1,97 @@
+// $Id$
+/**
+* Copyright (C) 2015 EDIT
+* European Distributed Institute of Taxonomy
+* http://www.e-taxonomy.eu
+*
+* The contents of this file are subject to the Mozilla Public License Version 1.1
+* See LICENSE.TXT at the top of this package for the full license terms.
+*/
+package eu.etaxonomy.cdm.persistence.dto;
+
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+import eu.etaxonomy.cdm.model.name.Rank;
+import eu.etaxonomy.cdm.model.taxon.Classification;
+
+/**
+ * @author a.kohlbecker
+ * @date Sep 3, 2015
+ *
+ */
+public class ClassificationLookupDTO {
+
+    private final Map<Integer, Integer> taxonIdToParentId = new HashMap<Integer, Integer>();
+    private final Map<Rank,Collection<Integer>> taxonIdByRank = new HashMap<Rank, Collection<Integer>>();
+    private final Map<Integer,Collection<Integer>> childTaxonMap = new HashMap<Integer,Collection<Integer>>();
+    private Classification classification = null;
+
+    /**
+     * @return the taxonIds
+     */
+    public Set<Integer> getTaxonIds() {
+        return taxonIdToParentId.keySet();
+    }
+
+    /**
+     * @return the taxonIdByRank
+     */
+    public Map<Rank, Collection<Integer>> getTaxonIdByRank() {
+        return taxonIdByRank;
+    }
+
+    /**
+     * @return the childTaxonMap
+     */
+    public Map<Integer, Collection<Integer>> getChildTaxonMap() {
+        return childTaxonMap;
+    }
+
+    /**
+     * @return the classification
+     */
+    public Classification getClassification() {
+        return classification;
+    }
+
+    /**
+     *
+     * @param classification
+     *      Must never be null the ClassificationLookupDTO always specific to one
+     *      Classification.
+     */
+    public ClassificationLookupDTO(Classification classification) {
+        this.classification  = classification;
+    }
+
+    public void add(Integer taxonId, Rank rank, Integer parentId) {
+
+        taxonIdToParentId.put(taxonId, parentId);
+
+        if(!childTaxonMap.containsKey(parentId)) {
+            childTaxonMap.put(parentId, new HashSet<Integer>());
+        }
+        childTaxonMap.get(parentId).add(taxonId);
+
+        if(!taxonIdByRank.containsKey(rank)) {
+            taxonIdByRank.put(rank, new HashSet<Integer>());
+        }
+        taxonIdByRank.get(rank).add(taxonId);
+    }
+
+    public void dropRank(Rank rank) {
+        Collection<Integer> idsForRank = taxonIdByRank.get(rank);
+        taxonIdByRank.remove(rank);
+
+        for(Integer taxonId : idsForRank) {
+            Integer parentId = taxonIdToParentId.get(taxonId);
+            taxonIdToParentId.remove(taxonId);
+            childTaxonMap.remove(parentId);
+        }
+    }
+
+}
index 96b0244b37dfb476d55743c22cd6a074af298c8c..cd1262ad0a482d434e6d6d4501b29bbdbefcc9ca 100644 (file)
@@ -30,6 +30,7 @@ import eu.etaxonomy.cdm.model.taxon.TaxonNode;
 import eu.etaxonomy.cdm.persistence.dao.reference.IReferenceDao;
 import eu.etaxonomy.cdm.persistence.dao.taxon.IClassificationDao;
 import eu.etaxonomy.cdm.persistence.dao.taxon.ITaxonDao;
+import eu.etaxonomy.cdm.persistence.dto.ClassificationLookupDTO;
 import eu.etaxonomy.cdm.test.integration.CdmTransactionalIntegrationTest;
 import eu.etaxonomy.cdm.test.unitils.CleanSweepInsertLoadStrategy;
 
@@ -166,6 +167,15 @@ public class ClassificationDaoHibernateImplTest extends CdmTransactionalIntegrat
 //        }
        }
 
+    /**
+     * Verifies that classificationLookup() reports one entry per taxon of the
+     * loaded classification. The expected count of 4 presumably corresponds to
+     * the taxa in the listRankSpecificRootNodes.xml dataset -- TODO confirm
+     * against the dataset file.
+     */
+    @Test
+    @DataSet(value="ClassificationDaoHibernateImplTest.listRankSpecificRootNodes.xml")
+    public void testClassificationLookup() {
+
+        Classification classification = classificationDao.load(UUID.fromString(CLASSIFICATION_FULL_UUID));
+        ClassificationLookupDTO classificationLookupDto = classificationDao.classificationLookup(classification);
+        assertEquals(4, classificationLookupDto.getTaxonIds().size());
+    }
+
 
     /**
      * At the moment the data created is special to the issue http://dev.e-taxonomy.eu/trac/ticket/2778
index e822ab39ea2ee6c663708b34f6ac35f77a4cce61..918799bb04e96555909d09f1e3efbb5dc17e70cf 100644 (file)
@@ -161,7 +161,9 @@ public interface IService<T extends ICdmBase>{
      *\r
      * @param idSet\r
      * @return\r
+     * @deprecated use {@link #listByIds(Set, Integer, Integer, List, List)} instead\r
      */\r
+    @Deprecated\r
     public List<T> findById(Set<Integer> idSet);  //can't be called find(Set<Integer>) as this conflicts with find(Set<UUID)\r
 \r
 \r
@@ -377,6 +379,17 @@ public interface IService<T extends ICdmBase>{
      */\r
     public List<T> merge(List<T> detachedObjects);\r
 \r
+    /**\r
+     * @param idSet\r
+     * @param pageSize\r
+     * @param pageNumber\r
+     * @param orderHints\r
+     * @param propertyPaths\r
+     * @return\r
+     */\r
+    List<T> listByIds(Set<Integer> idSet, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints,\r
+            List<String> propertyPaths);\r
+\r
     /**\r
      * This method allows for the possibility of returning the input transient\r
      * entities instead of the merged persistent entity\r
index 5addd1c275052c996ea3e8a6661bc7cd51a98922..b7e63038e8ad7efcc5ac7f92f969e39311c2cc69 100644 (file)
@@ -120,6 +120,12 @@ public abstract class ServiceBase<T extends CdmBase, DAO extends ICdmEntityDao<T
         return dao.listByIds(idSet, null, null, null, null);\r
     }\r
 \r
+    @Override\r
+    @Transactional(readOnly = true)\r
+    public List<T> listByIds(Set<Integer> idSet, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths){\r
+        return dao.listByIds(idSet, pageSize, pageNumber, orderHints, propertyPaths);\r
+    }\r
+\r
     @Override\r
     @Transactional(readOnly = true)\r
     public T find(UUID uuid) {\r
index 076ca01ccce5e558ebb3b25ff539ea46e778c81e..b5f5288a839a290dbcf0545744907a5aeb534e08 100644 (file)
@@ -13,6 +13,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -46,6 +47,7 @@ import eu.etaxonomy.cdm.model.common.Extension;
 import eu.etaxonomy.cdm.model.common.ExtensionType;
 import eu.etaxonomy.cdm.model.common.Marker;
 import eu.etaxonomy.cdm.model.common.MarkerType;
+import eu.etaxonomy.cdm.model.common.OrderedTermBase;
 import eu.etaxonomy.cdm.model.description.DescriptionElementBase;
 import eu.etaxonomy.cdm.model.description.Distribution;
 import eu.etaxonomy.cdm.model.description.PresenceAbsenceTerm;
@@ -54,7 +56,10 @@ import eu.etaxonomy.cdm.model.location.NamedArea;
 import eu.etaxonomy.cdm.model.name.Rank;
 import eu.etaxonomy.cdm.model.taxon.Classification;
 import eu.etaxonomy.cdm.model.taxon.Taxon;
+import eu.etaxonomy.cdm.model.taxon.TaxonBase;
 import eu.etaxonomy.cdm.model.taxon.TaxonNode;
+import eu.etaxonomy.cdm.persistence.dao.taxon.IClassificationDao;
+import eu.etaxonomy.cdm.persistence.dto.ClassificationLookupDTO;
 
 /**
  * The TransmissionEngineDistribution is meant to be used from within a service class.
@@ -100,6 +105,17 @@ public class TransmissionEngineDistribution { //TODO extends IoBase?
     final boolean ONLY_FISRT_BATCH = false;
 
 
+    protected static final List<String> TAXONDESCRIPTION_INIT_STRATEGY = Arrays.asList(new String [] {
+            "description.markers.markerType",
+            "description.elements.markers.markerType",
+            "description.elements.area",
+            "description.elements.sources.citation.authorship",
+            "description.elements.sources.nameUsedInSource",
+            "description.elements.multilanguageText",
+            "name.status.type",
+    });
+
+
     /**
      * A map which contains the status terms as key and the priority as value
      * The map will contain both, the PresenceTerms and the AbsenceTerms
@@ -118,6 +134,9 @@ public class TransmissionEngineDistribution { //TODO extends IoBase?
     @Autowired
     private IClassificationService classificationService;
 
+    @Autowired
+    private IClassificationDao classificationDao;
+
     @Autowired
     private INameService mameService;
 
@@ -311,44 +330,61 @@ public class TransmissionEngineDistribution { //TODO extends IoBase?
             monitor = new NullProgressMonitor();
         }
 
-        // take start time for performance testing
-        // NOTE: use ONLY_FISRT_BATCH = true to measure only one batch
-        double start = System.currentTimeMillis();
+        logger.setLevel(Level.INFO); // TRACE will slow down a lot since it forces loading all term representations
+
+        logger.info("Hibernate JDBC Batch size: "
+                + ((SessionFactoryImplementor) getSession().getSessionFactory()).getSettings().getJdbcBatchSize());
 
         // only for debugging:
         logger.setLevel(Level.INFO);
         //Logger.getLogger("org.hibernate.SQL").setLevel(Level.DEBUG);
 
-        logger.info("Hibernate JDBC Batch size: "
-                + ((SessionFactoryImplementor) getSession().getSessionFactory()).getSettings().getJdbcBatchSize());
+        Set<Classification> classifications = new HashSet<Classification>();
+        if(classification == null) {
+            classifications.addAll(classificationService.listClassifications(null, null, null, null));
+        } else {
+            classifications.add(classification);
+        }
 
-        int workTicks = mode.equals(AggregationMode.byAreasAndRanks) ? 400 : 200;
-        monitor.beginTask("Accumulating distributions", workTicks + 1 );
+        int aggregationWorkTicks = mode.equals(AggregationMode.byAreasAndRanks) ? 400 : 200;
 
+        // take start time for performance testing
+        // NOTE: use ONLY_FISRT_BATCH = true to measure only one batch
+        double start = System.currentTimeMillis();
 
-        monitor.subTask("updating Priorities");
+        monitor.beginTask("Accumulating distributions", (classifications.size() * aggregationWorkTicks) + 1 );
         updatePriorities();
         monitor.worked(1);
-        monitor.setTaskName("Accumulating distributions");
 
-        monitor.subTask("Accumulating distributions to super areas");
-        if (mode.equals(AggregationMode.byAreas) || mode.equals(AggregationMode.byAreasAndRanks)) {
-            accumulateByArea(superAreas, classification, new SubProgressMonitor(monitor, 200),
-                    mode.equals(AggregationMode.byAreas) || mode.equals(AggregationMode.byAreasAndRanks));
-        }
-        double end1 = System.currentTimeMillis();
-        logger.info("Time elapsed for accumulateByArea() : " + (end1 - start) / (1000) + "s");
-
-        double start2 = System.currentTimeMillis();
-        monitor.subTask("Accumulating distributions to higher ranks");
-        if (mode.equals(AggregationMode.byRanks) || mode.equals(AggregationMode.byAreasAndRanks)) {
-            accumulateByRank(lowerRank, upperRank, classification, new SubProgressMonitor(monitor, 200),
-                    mode.equals(AggregationMode.byRanks));
-        }
+        for(Classification _classification : classifications) {
+
+            ClassificationLookupDTO classificationLookupDto = classificationDao.classificationLookup(_classification);
+
+            monitor.subTask("Accumulating distributions to super areas for " + _classification.getTitleCache());
+            if (mode.equals(AggregationMode.byAreas) || mode.equals(AggregationMode.byAreasAndRanks)) {
+                accumulateByArea(superAreas, classificationLookupDto, new SubProgressMonitor(monitor, 200),
+                        mode.equals(AggregationMode.byAreas) || mode.equals(AggregationMode.byAreasAndRanks));
+            }
+            monitor.subTask("Accumulating distributions to higher ranks for " + _classification.getTitleCache());
+
+            double end1 = System.currentTimeMillis();
 
-        double end2 = System.currentTimeMillis();
-        logger.info("Time elapsed for accumulateByRank() : " + (end2 - start2) / (1000) + "s");
-        logger.info("Time elapsed for accumulate(): " + (end2 - start) / (1000) + "s");
+            logger.info("Time elapsed for accumulateByArea() : " + (end1 - start) / (1000) + "s");
+
+            double start2 = System.currentTimeMillis();
+            if (mode.equals(AggregationMode.byRanks) || mode.equals(AggregationMode.byAreasAndRanks)) {
+                accumulateByRank(lowerRank, upperRank, _classification, new SubProgressMonitor(monitor, 200),
+                        mode.equals(AggregationMode.byRanks));
+            }
+
+            double end2 = System.currentTimeMillis();
+            logger.info("Time elapsed for accumulateByRank() : " + (end2 - start2) / (1000) + "s");
+            logger.info("Time elapsed for accumulate(): " + (end2 - start) / (1000) + "s");
+
+            if(ONLY_FISRT_BATCH) {
+                break;
+            }
+        }
     }
 
     /**
@@ -371,10 +407,10 @@ public class TransmissionEngineDistribution { //TODO extends IoBase?
      *
      * @param superAreas
      *      the areas to which the subordinate areas should be projected
-     * @param classification
-     *      limit the accumulation process to a specific classification (not yet implemented)
+     * @param classificationLookupDao
+     *
      */
-    protected void accumulateByArea(List<NamedArea> superAreas, Classification classification,  IProgressMonitor subMonitor, boolean doClearDescriptions) {
+    protected void accumulateByArea(List<NamedArea> superAreas, ClassificationLookupDTO classificationLookupDao,  IProgressMonitor subMonitor, boolean doClearDescriptions) {
 
         int batchSize = 1000;
 
@@ -388,9 +424,11 @@ public class TransmissionEngineDistribution { //TODO extends IoBase?
         List<NamedArea> superAreaList = (List)termService.find(superAreaUuids);
 
         // visit all accepted taxa
-        Pager<Taxon> taxonPager = null;
+        subMonitor.beginTask("Accumulating by area ",  classificationLookupDao.getTaxonIds().size());
+        Iterator<Integer> taxonIdIterator = classificationLookupDao.getTaxonIds().iterator();
+
         int pageIndex = 0;
-        boolean isLastPage = false;
+        while (taxonIdIterator.hasNext()) {
-        while (!isLastPage) {
 
             if(txStatus == null) {
@@ -398,45 +436,43 @@ public class TransmissionEngineDistribution { //TODO extends IoBase?
                 txStatus = startTransaction(false);
             }
 
-            //TODO limit by classification if not null
-            taxonPager = taxonService.page(Taxon.class, batchSize, pageIndex++, null, null);
-
-            if(taxonPager.getCurrentIndex() == 0){
-                subMonitor.beginTask("Accumulating by area ",  taxonPager.getCount().intValue());
+            // load taxa for this batch
+            List<TaxonBase> taxa = new ArrayList<TaxonBase>(batchSize);
+            Set<Integer> taxonIds = new HashSet<Integer>(batchSize);
+            while(taxonIdIterator.hasNext() && taxonIds.size() < batchSize ) {
+                taxonIds.add(taxonIdIterator.next());
             }
 
-            logger.debug("accumulateByArea() - taxon " + taxonPager.getFirstRecord() + " to " + taxonPager.getLastRecord() + " of " + taxonPager.getCount() + "]");
+//            logger.debug("accumulateByArea() - taxon " + taxonPager.getFirstRecord() + " to " + taxonPager.getLastRecord() + " of " + taxonPager.getCount() + "]");
 
-            if (taxonPager.getRecords().size() == 0){
-                break;
-            }
-            isLastPage = taxonPager.getRecords().size() < batchSize;
+            taxa = taxonService.listByIds(taxonIds, null, null, null, TAXONDESCRIPTION_INIT_STRATEGY);
 
             // iterate over the taxa and accumulate areas
-            for(Taxon taxon : taxonPager.getRecords()) {
+            for(TaxonBase taxon : taxa) {
                 if(logger.isDebugEnabled()){
-                    logger.debug("accumulateByArea() - taxon :" + taxon.getTitleCache());
+                    logger.debug("accumulateByArea() - taxon :" + taxonToString(taxon));
                 }
 
-                TaxonDescription description = findComputedDescription(taxon, doClearDescriptions);
-                List<Distribution> distributions = distributionsFor(taxon);
+                TaxonDescription description = findComputedDescription((Taxon)taxon, doClearDescriptions);
+                List<Distribution> distributions = distributionsFor((Taxon)taxon);
 
                 // Step through superAreas for accumulation of subAreas
                 for (NamedArea superArea : superAreaList){
 
                     // accumulate all sub area status
                     PresenceAbsenceTerm accumulatedStatus = null;
+                    // TODO consider using the TermHierarchyLookup (only in local branch a.kohlbecker)
                     Set<NamedArea> subAreas = getSubAreasFor(superArea);
                     for(NamedArea subArea : subAreas){
                         if(logger.isTraceEnabled()){
-                            logger.trace("accumulateByArea() - \t\t" + subArea.getLabel());
+                            logger.trace("accumulateByArea() - \t\t" + termToString(subArea));
                         }
                         // step through all distributions for the given subArea
                         for(Distribution distribution : distributions){
                             if(distribution.getArea() != null && distribution.getArea().equals(subArea) && distribution.getStatus() != null) {
                                 PresenceAbsenceTerm status = distribution.getStatus();
                                 if(logger.isTraceEnabled()){
-                                    logger.trace("accumulateByArea() - \t\t" + subArea.getLabel() + ": " + status.getLabel());
+                                    logger.trace("accumulateByArea() - \t\t" + termToString(subArea) + ": " + termToString(status));
                                 }
                                 // skip all having a status value different of those in byAreaIgnoreStatusList
                                 if (getByAreaIgnoreStatusList().contains(status)){
@@ -448,7 +484,7 @@ public class TransmissionEngineDistribution { //TODO extends IoBase?
                     } // next sub area
                     if (accumulatedStatus != null) {
                         if(logger.isDebugEnabled()){
-                            logger.debug("accumulateByArea() - \t >> " + superArea.getLabel() + ": " + accumulatedStatus.getLabel());
+                            logger.debug("accumulateByArea() - \t >> " + termToString(superArea) + ": " + termToString(accumulatedStatus));
                         }
                         // store new distribution element for superArea in taxon description
                         Distribution newDistribitionElement = Distribution.NewInstance(superArea, accumulatedStatus);
@@ -464,7 +500,6 @@ public class TransmissionEngineDistribution { //TODO extends IoBase?
 
             } // next taxon
 
-            taxonPager = null;
             flushAndClear();
 
             // commit for every batch, otherwise the persistent context
@@ -481,6 +516,32 @@ public class TransmissionEngineDistribution { //TODO extends IoBase?
         subMonitor.done();
     }
 
+    /**
+     * @param taxon
+     * @param logger2
+     * @return
+     */
+    private String taxonToString(TaxonBase taxon) {
+        if(logger.isTraceEnabled()) {
+            return taxon.getTitleCache();
+        } else {
+            return taxon.toString();
+        }
+    }
+
+    /**
+     * @param taxon
+     * @param logger2
+     * @return
+     */
+    private String termToString(OrderedTermBase<?> term) {
+        if(logger.isTraceEnabled()) {
+            return term.getLabel() + " [" + term.getIdInVocabulary() + "]";
+        } else {
+            return term.getIdInVocabulary();
+        }
+    }
+
    /**
     * Step 2: Accumulate by ranks staring from lower rank to upper rank, the status of all children
     * are accumulated on each rank starting from lower rank to upper rank.
@@ -520,7 +581,7 @@ public class TransmissionEngineDistribution { //TODO extends IoBase?
         for (Rank rank : ranks) {
 
             if(logger.isDebugEnabled()){
-                logger.debug("accumulateByRank() - at Rank '" + rank.getLabel() + "'");
+                logger.debug("accumulateByRank() - at Rank '" + termToString(rank) + "'");
             }
 
             Pager<TaxonNode> taxonPager = null;
@@ -562,13 +623,13 @@ public class TransmissionEngineDistribution { //TODO extends IoBase?
                         Taxon taxon = taxonNode.getTaxon();
                         if (taxaProcessedIds.contains(taxon.getId())) {
                             if(logger.isDebugEnabled()){
-                                logger.debug("accumulateByRank() - skipping already processed taxon :" + taxon.getTitleCache());
+                                logger.debug("accumulateByRank() - skipping already processed taxon :" + taxonToString(taxon));
                             }
                             continue;
                         }
                         taxaProcessedIds.add(taxon.getId());
                         if(logger.isDebugEnabled()){
-                            logger.debug("accumulateByRank() [" + rank.getLabel() + "] - taxon :" + taxon.getTitleCache());
+                            logger.debug("accumulateByRank() [" + rank.getLabel() + "] - taxon :" + taxonToString(taxon));
                         }
 
                         // Step through direct taxonomic children for accumulation
@@ -578,7 +639,7 @@ public class TransmissionEngineDistribution { //TODO extends IoBase?
 
                             getSession().setReadOnly(taxonNode, true);
                             if(logger.isTraceEnabled()){
-                                logger.trace("                   subtaxon :" + subTaxonNode.getTaxon().getTitleCache());
+                                logger.trace("                   subtaxon :" + taxonToString(subTaxonNode.getTaxon()));
                             }
 
                             for(Distribution distribution : distributionsFor(subTaxonNode.getTaxon()) ) {
@@ -765,8 +826,15 @@ public class TransmissionEngineDistribution { //TODO extends IoBase?
      * @return
      */
     private List<Distribution> distributionsFor(Taxon taxon) {
-        return descriptionService
-                .listDescriptionElementsForTaxon(taxon, null, Distribution.class, null, null, null);
+        List<Distribution> distributions = new ArrayList<Distribution>();
+        for(TaxonDescription description: taxon.getDescriptions()) {
+            for(DescriptionElementBase deb : description.getElements()) {
+                if(deb instanceof Distribution) {
+                    distributions.add((Distribution)deb);
+                }
+            }
+        }
+        return distributions;
     }
 
     /**