ref #8754: implement delete method for DescriptiveDataSet
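
A minimal usage sketch of the new method (not part of the diff below), assuming the operation is exposed through the matching cdmlib service interface (here called IDescriptiveDataSetService) and that DeleteResult offers the isOk()/getExceptions() accessors known from other CDM services; names and package paths outside this diff are illustrative:

    import java.util.UUID;

    import eu.etaxonomy.cdm.api.service.DeleteResult;   // package assumed from the cdmlib service layer

    public class DescriptiveDataSetDeleteExample {

        // assumed Spring-injected service bean; the interface name is illustrative
        private IDescriptiveDataSetService descriptiveDataSetService;

        public void deleteDataSet(UUID dataSetUuid) {
            // the new delete(UUID) first detaches and deletes all descriptions of the
            // data set, then deletes the DescriptiveDataSet itself
            DeleteResult result = descriptiveDataSetService.delete(dataSetUuid);
            if (!result.isOk()) {
                // failed description deletions are reported via the included results
                result.getExceptions().forEach(Throwable::printStackTrace);
            }
        }
    }
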
[cdmlib.git] / cdmlib-services / src / main / java / eu / etaxonomy / cdm / api / service / DescriptiveDataSetService.java
index 3bea87fc492ebf92a2688a975bb37cc52ec41cb4..ac28361c724123918ef635e82079ca51c83a7e93 100644 (file)
@@ -3,13 +3,12 @@ package eu.etaxonomy.cdm.api.service;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
-import java.util.Map.Entry;
 import java.util.Optional;
-import java.util.OptionalDouble;
 import java.util.Set;
 import java.util.UUID;
 import java.util.stream.Collectors;
@@ -20,7 +19,6 @@ import org.springframework.stereotype.Service;
 import org.springframework.transaction.annotation.Transactional;
 
 import eu.etaxonomy.cdm.api.service.UpdateResult.Status;
-import eu.etaxonomy.cdm.api.service.config.DescriptionAggregationConfiguration;
 import eu.etaxonomy.cdm.api.service.config.IdentifiableServiceConfiguratorImpl;
 import eu.etaxonomy.cdm.api.service.dto.RowWrapperDTO;
 import eu.etaxonomy.cdm.api.service.dto.SpecimenRowWrapperDTO;
@@ -31,7 +29,6 @@ import eu.etaxonomy.cdm.hibernate.HibernateProxyHelper;
 import eu.etaxonomy.cdm.model.common.IdentifiableSource;
 import eu.etaxonomy.cdm.model.common.Language;
 import eu.etaxonomy.cdm.model.description.CategoricalData;
-import eu.etaxonomy.cdm.model.description.Character;
 import eu.etaxonomy.cdm.model.description.DescriptionBase;
 import eu.etaxonomy.cdm.model.description.DescriptionElementBase;
 import eu.etaxonomy.cdm.model.description.DescriptionType;
@@ -42,7 +39,6 @@ import eu.etaxonomy.cdm.model.description.IndividualsAssociation;
 import eu.etaxonomy.cdm.model.description.PolytomousKey;
 import eu.etaxonomy.cdm.model.description.QuantitativeData;
 import eu.etaxonomy.cdm.model.description.SpecimenDescription;
-import eu.etaxonomy.cdm.model.description.StateData;
 import eu.etaxonomy.cdm.model.description.StatisticalMeasure;
 import eu.etaxonomy.cdm.model.description.TaxonDescription;
 import eu.etaxonomy.cdm.model.description.TextData;
@@ -50,7 +46,6 @@ import eu.etaxonomy.cdm.model.location.NamedArea;
 import eu.etaxonomy.cdm.model.occurrence.DerivedUnit;
 import eu.etaxonomy.cdm.model.occurrence.FieldUnit;
 import eu.etaxonomy.cdm.model.occurrence.SpecimenOrObservationBase;
-import eu.etaxonomy.cdm.model.reference.OriginalSourceType;
 import eu.etaxonomy.cdm.model.taxon.Classification;
 import eu.etaxonomy.cdm.model.taxon.Taxon;
 import eu.etaxonomy.cdm.model.taxon.TaxonBase;
@@ -127,7 +122,7 @@ public class DescriptiveDataSetService
             RowWrapperDTO rowWrapper = null;
             // only viable descriptions are aggregated, literature or default descriptions
             if(HibernateProxyHelper.isInstanceOf(description, TaxonDescription.class) &&
-                    (description.getTypes().contains(DescriptionType.AGGREGATED)
+                    (description.isAggregatedStructuredDescription()
                             || description.getTypes().contains(DescriptionType.DEFAULT_VALUES_FOR_AGGREGATION)
                             || description.getTypes().contains(DescriptionType.SECONDARY_DATA)
                             )){
@@ -148,9 +143,19 @@ public class DescriptiveDataSetService
     @Override
     public Collection<SpecimenNodeWrapper> loadSpecimens(DescriptiveDataSet descriptiveDataSet){
         List<UUID> filteredNodes = findFilteredTaxonNodes(descriptiveDataSet);
+        if(filteredNodes.isEmpty()){
+            return Collections.emptySet();
+        }
         return occurrenceService.listUuidAndTitleCacheByAssociatedTaxon(filteredNodes, null, null);
     }
 
+    @Override
+    public Collection<SpecimenNodeWrapper> loadSpecimens(UUID descriptiveDataSetUuid){
+        DescriptiveDataSet dataSet = load(descriptiveDataSetUuid);
+        return loadSpecimens(dataSet);
+    }
+
+
     @Override
     public List<UUID> findFilteredTaxonNodes(DescriptiveDataSet descriptiveDataSet){
         TaxonNodeFilter filter = TaxonNodeFilter.NewRankInstance(descriptiveDataSet.getMinRank(), descriptiveDataSet.getMaxRank());
@@ -175,19 +180,21 @@ public class DescriptiveDataSetService
     }
 
     private TaxonDescription recurseDefaultDescription(TaxonNode node, DescriptiveDataSet dataSet){
-        TaxonDescription defaultDescription = findTaxonDescriptionByDescriptionType(dataSet, node.getTaxon(), DescriptionType.DEFAULT_VALUES_FOR_AGGREGATION);
-        if(defaultDescription==null && node.getParent()!=null){
-            defaultDescription = recurseDefaultDescription(node.getParent(), dataSet);
+        TaxonDescription defaultDescription = null;
+        if(node!=null && node.getTaxon()!=null){
+            defaultDescription = findTaxonDescriptionByDescriptionType(dataSet, node.getTaxon(), DescriptionType.DEFAULT_VALUES_FOR_AGGREGATION);
+            if(defaultDescription==null && node.getParent()!=null){
+                defaultDescription = recurseDefaultDescription(node.getParent(), dataSet);
+            }
         }
         return defaultDescription;
     }
 
     private TaxonNode findTaxonNodeForDescription(SpecimenDescription description, DescriptiveDataSet descriptiveDataSet){
         SpecimenOrObservationBase specimen = description.getDescribedSpecimenOrObservation();
-        TaxonNode taxonNode = null;
         //get taxon node
 
-        Set<IndividualsAssociation> associations = (Set<IndividualsAssociation>) descriptiveDataSet.getDescriptions()
+        Set<IndividualsAssociation> associations = (Set<IndividualsAssociation>)descriptiveDataSet.getDescriptions()
                 .stream()
                 .flatMap(desc->desc.getElements().stream())// put all description element in one stream
                 .filter(element->element instanceof IndividualsAssociation)
@@ -226,17 +233,26 @@ public class DescriptiveDataSetService
         UpdateResult result = new UpdateResult();
         DescriptiveDataSet dataSet = load(datasetUuid);
         result.setCdmEntity(dataSet);
+
+        List<UUID> taxonUuids = wrappers.stream().map(wrapper->wrapper.getTaxonNode().getTaxon().getUuid()).collect(Collectors.toList());
+        List<TaxonBase> taxa = taxonService.load(taxonUuids, Arrays.asList(new String[]{"descriptions"}));
+
         for (SpecimenNodeWrapper wrapper : wrappers) {
+            Optional<TaxonBase> findAny = taxa.stream().filter(taxon->taxon.getUuid().equals(wrapper.getTaxonNode().getTaxon().getUuid())).findAny();
+            if(!findAny.isPresent()){
+                result.addException(new IllegalArgumentException("Could not create wrapper for "+wrapper.getUuidAndTitleCache().getTitleCache()));
+                continue;
+            }
+            Taxon taxon = (Taxon) findAny.get();
             UUID taxonDescriptionUuid = wrapper.getTaxonDescriptionUuid();
             TaxonDescription taxonDescription = null;
             if(taxonDescriptionUuid!=null){
                 taxonDescription = (TaxonDescription) descriptionService.load(taxonDescriptionUuid);
             }
             if(taxonDescription==null){
-                Optional<TaxonDescription> associationDescriptionOptional = wrapper.getTaxonNode().getTaxon().getDescriptions().stream()
+                Optional<TaxonDescription> associationDescriptionOptional = taxon.getDescriptions().stream()
                         .filter(desc->desc.getTypes().contains(DescriptionType.INDIVIDUALS_ASSOCIATION))
                         .findFirst();
-                Taxon taxon = wrapper.getTaxonNode().getTaxon();
                 if(!associationDescriptionOptional.isPresent()){
                     taxonDescription = TaxonDescription.NewInstance(taxon);
                 }
@@ -349,136 +365,6 @@ public class DescriptiveDataSetService
         return findTaxonDescriptionByDescriptionType(dataSet, taxonNode.getTaxon(), descriptionType);
     }
 
-    @Override
-    @Transactional(readOnly=false)
-    public UpdateResult aggregate(UUID descriptiveDataSetUuid, DescriptionAggregationConfiguration config, IProgressMonitor monitor) {
-        DescriptiveDataSet dataSet = load(descriptiveDataSetUuid);
-        Set<DescriptionBase> descriptions = dataSet.getDescriptions();
-
-        monitor.beginTask("Aggregate data set", descriptions.size()*2);
-
-        UpdateResult result = new UpdateResult();
-        result.setCdmEntity(dataSet);
-
-        // delete all aggregation description of this dataset (DescriptionType.AGGREGATED)
-        Set<TaxonDescription> aggregations = dataSet.getDescriptions().stream()
-        .filter(aggDesc->aggDesc instanceof TaxonDescription)
-        .map(aggDesc->(TaxonDescription)aggDesc)
-        .filter(desc -> desc.getTypes().contains(DescriptionType.AGGREGATED))
-        .collect(Collectors.toSet());
-        aggregations.forEach(aggregation->dataSet.removeDescription(aggregation));
-        // also delete all their cloned source descriptions
-        Set<String> sourceUuids = aggregations.stream()
-        .flatMap(aggDesc->aggDesc.getSources().stream())
-        .filter(source->source.getType().equals(OriginalSourceType.Aggregation))
-        .map(aggSource->aggSource.getIdInSource())
-        .collect(Collectors.toSet());
-        for (String string : sourceUuids) {
-            try {
-                UUID uuid = UUID.fromString(string);
-                DescriptionBase sourceClone = descriptionService.load(uuid);
-                descriptionService.deleteDescription(sourceClone);
-            } catch (IllegalArgumentException|NullPointerException e) {
-                // ignore
-            }
-        }
-        //finally delete the aggregation description itself
-        aggregations.forEach(aggDesc->descriptionService.delete(aggDesc));
-
-        // sort descriptions by taxa
-        Map<TaxonNode, Set<UUID>> taxonNodeToSpecimenDescriptionMap = new HashMap<>();
-        for (DescriptionBase descriptionBase : descriptions) {
-            if(monitor.isCanceled()){
-                result.setAbort();
-                return result;
-            }
-
-            if(descriptionBase instanceof SpecimenDescription){
-                SpecimenDescription specimenDescription = HibernateProxyHelper.deproxy(descriptionBase, SpecimenDescription.class);
-                if(specimenDescription.getElements().stream().anyMatch(element->hasCharacterData(element))){
-                    TaxonNode taxonNode = findTaxonNodeForDescription(specimenDescription, dataSet);
-                    if(taxonNode!=null){
-                        addDescriptionToTaxonNodeMap(specimenDescription.getUuid(), taxonNode, taxonNodeToSpecimenDescriptionMap);
-                    }
-                }
-            }
-            monitor.worked(1);
-        }
-        if(config.isRecursiveAggregation()){
-            propagateDescriptionsToParentNodes(dataSet, taxonNodeToSpecimenDescriptionMap);
-        }
-        // aggregate per taxa
-        Map<UUID, UUID> specimenToClonedSourceDescription = new HashMap<>();
-        for (Entry<TaxonNode, Set<UUID>> entry: taxonNodeToSpecimenDescriptionMap.entrySet()) {
-            if(monitor.isCanceled()){
-                result.setAbort();
-                return result;
-            }
-            UUID taxonUuid = entry.getKey().getTaxon().getUuid();
-            Set<UUID> specimenDescriptionUuids = entry.getValue();
-            result.includeResult(aggregateDescription(taxonUuid, specimenDescriptionUuids, descriptiveDataSetUuid,
-                    specimenToClonedSourceDescription));
-            monitor.worked(1);
-        }
-        monitor.done();
-        return result;
-    }
-
-
-    private boolean hasCharacterData(DescriptionElementBase element) {
-        return (element instanceof CategoricalData && !((CategoricalData) element).getStatesOnly().isEmpty())
-                || (element instanceof QuantitativeData
-                        && !((QuantitativeData) element).getStatisticalValues().isEmpty());
-    }
-
-    private void addDescriptionToTaxonNodeMap(UUID descriptionUuid, TaxonNode taxonNode, Map<TaxonNode, Set<UUID>> taxonNodeToSpecimenDescriptionMap){
-        Set<UUID> specimenDescriptionUuids = taxonNodeToSpecimenDescriptionMap.get(taxonNode);
-        if(specimenDescriptionUuids==null){
-            specimenDescriptionUuids = new HashSet<>();
-        }
-        specimenDescriptionUuids.add(descriptionUuid);
-        taxonNodeToSpecimenDescriptionMap.put(taxonNode, specimenDescriptionUuids);
-    }
-
-    private void propagateDescriptionsToParentNodes(DescriptiveDataSet dataSet, Map<TaxonNode, Set<UUID>> taxonNodeToSpecimenDescriptionMap){
-        Map<TaxonNode, Set<UUID>> parentMap = new HashMap<>();
-        for (Entry<TaxonNode, Set<UUID>> entry: taxonNodeToSpecimenDescriptionMap.entrySet()) {
-            Set<UUID> descriptionUuids = entry.getValue();
-            TaxonNode node = entry.getKey();
-            TaxonNode parentNode = node.getParent();
-            while(parentNode!=null && isTaxonNodeInDescriptiveDataSet(parentNode, dataSet)){
-                for (UUID uuid : descriptionUuids) {
-                    addDescriptionToTaxonNodeMap(uuid, node.getParent(), parentMap);
-                }
-                parentNode = parentNode.getParent();
-            }
-        }
-        // merge parent map
-        for (Entry<TaxonNode, Set<UUID>> entry: parentMap.entrySet()) {
-            Set<UUID> descriptionUuids = entry.getValue();
-            TaxonNode node = entry.getKey();
-            for (UUID uuid : descriptionUuids) {
-                addDescriptionToTaxonNodeMap(uuid, node, taxonNodeToSpecimenDescriptionMap);
-            }
-        }
-    }
-
-    private boolean isTaxonNodeInDescriptiveDataSet(TaxonNode taxonNode, DescriptiveDataSet dataSet){
-        Set<TaxonNode> taxonSubtreeFilter = dataSet.getTaxonSubtreeFilter();
-        for (TaxonNode datasetNode : taxonSubtreeFilter) {
-            if(datasetNode.getUuid().equals(taxonNode.getUuid())){
-                return true;
-            }
-            List<TaxonNode> allChildren = taxonNodeService.loadChildNodesOfTaxonNode(datasetNode, null, true, true, null);
-            for (TaxonNode childNode : allChildren) {
-                if(childNode.getUuid().equals(taxonNode.getUuid())){
-                    return true;
-                }
-            }
-        }
-        return false;
-    }
-
     @Override
     @Transactional(readOnly=false)
     public UpdateResult generatePolytomousKey(UUID descriptiveDataSetUuid, UUID taxonUuid) {
@@ -506,162 +392,6 @@ public class DescriptiveDataSetService
         return result;
     }
 
-    @SuppressWarnings("unchecked")
-    private UpdateResult aggregateDescription(UUID taxonUuid, Set<UUID> specimenDescriptionUuids,
-            UUID descriptiveDataSetUuid, Map<UUID, UUID> specimenToClonedSourceDescription) {
-        UpdateResult result = new UpdateResult();
-
-        TaxonBase taxonBase = taxonService.load(taxonUuid);
-        if(!(taxonBase instanceof Taxon)){
-            result.addException(new ClassCastException("The given taxonUUID does not belong to a taxon"));
-            result.setError();
-            return result;
-        }
-        Taxon taxon = (Taxon)taxonBase;
-        List<DescriptionBase> descriptions = descriptionService.load(new ArrayList<>(specimenDescriptionUuids), null);
-        List<SpecimenDescription> specimenDescriptions = descriptions.stream()
-                .filter(d -> d instanceof SpecimenDescription)
-                .map(d -> (SpecimenDescription) d)
-                .collect(Collectors.toList());
-        Map<Character, List<DescriptionElementBase>> featureToElementMap = new HashMap<>();
-
-        DescriptiveDataSet dataSet = load(descriptiveDataSetUuid);
-        if(dataSet==null){
-            result.addException(new IllegalArgumentException("Could not find data set for uuid "+descriptiveDataSetUuid));
-            result.setAbort();
-            return result;
-        }
-
-        //extract all character description elements
-        for (DescriptionBase<?> description : specimenDescriptions) {
-            description.getElements().stream()
-            //filter out elements that do not have a Character as Feature
-            .filter(element->HibernateProxyHelper.isInstanceOf(element.getFeature(), Character.class))
-            .forEach(ele->addCharacterToMap(featureToElementMap, ele));
-        }
-
-        TaxonDescription aggregationDescription = createAggregationDescription(taxon, dataSet);
-
-        aggregateCharacterData(featureToElementMap, aggregationDescription);
-
-        // add sources to aggregation description
-        // create a snapshot of those descriptions that were used to create the aggregated descriptions
-        // TODO implement when the clones descriptions can be attached to taxon
-        // descriptions as sources
-        specimenDescriptions.forEach(specimenDescription -> addSourceDescription(aggregationDescription, specimenDescription,
-                specimenToClonedSourceDescription));
-
-        result.addUpdatedObject(taxon);
-        result.addUpdatedObject(aggregationDescription);
-
-        return result;
-    }
-
-    private void aggregateCharacterData(Map<Character, List<DescriptionElementBase>> featureToElementMap,
-            TaxonDescription aggregationDescription) {
-        for(Entry<Character, List<DescriptionElementBase>> entry:featureToElementMap.entrySet()){
-            Character character = entry.getKey();
-            List<DescriptionElementBase> elements = entry.getValue();
-            //aggregate categorical data
-            if(character.isSupportsCategoricalData()){
-                aggregateCategoricalData(aggregationDescription, character, elements);
-            }
-            //aggregate quantitative data
-            else if(character.isSupportsQuantitativeData()){
-                aggregateQuantitativeData(aggregationDescription, character, elements);
-            }
-        }
-    }
-
-    private TaxonDescription createAggregationDescription(Taxon taxon, DescriptiveDataSet dataSet) {
-        TaxonDescription aggregationDescription = TaxonDescription.NewInstance(taxon);
-        aggregationDescription.setTitleCache("[Aggregation] "+dataSet.getTitleCache(), true);
-        aggregationDescription.getTypes().add(DescriptionType.AGGREGATED);
-        aggregationDescription.addSource(IdentifiableSource.NewInstance(OriginalSourceType.Aggregation));
-        aggregationDescription.addDescriptiveDataSet(dataSet);
-        return aggregationDescription;
-    }
-
-    private void addSourceDescription(TaxonDescription taxonDescription, SpecimenDescription specimenDescription,
-            Map<UUID, UUID> specimenToClonedSourceDescription) {
-        UUID sourceCloneUuid = specimenToClonedSourceDescription.get(specimenDescription.getUuid());
-        if(sourceCloneUuid!=null){
-            addAggregationSource(taxonDescription, sourceCloneUuid);
-        }
-        else{
-            SpecimenOrObservationBase<?> specimenOrObservation = specimenDescription.getDescribedSpecimenOrObservation();
-            SpecimenDescription clone = (SpecimenDescription) specimenDescription.clone();
-            clone.getTypes().add(DescriptionType.CLONE_FOR_SOURCE);
-            specimenOrObservation.addDescription(clone);
-
-            addAggregationSource(taxonDescription, clone.getUuid());
-            specimenToClonedSourceDescription.put(specimenDescription.getUuid(), clone.getUuid());
-        }
-    }
-
-    private void addAggregationSource(TaxonDescription taxonDescription, UUID cloneUuid) {
-        IdentifiableSource source = IdentifiableSource.NewInstance(OriginalSourceType.Aggregation);
-        source.setIdInSource(cloneUuid.toString());
-        source.setIdNamespace("SpecimenDescription");
-        taxonDescription.addSource(source);
-    }
-
-    private void aggregateQuantitativeData(TaxonDescription description, Character character,
-            List<DescriptionElementBase> elements) {
-        QuantitativeData aggregate = QuantitativeData.NewInstance(character);
-        List<Float> values = new ArrayList<>();
-        float sampleSize = 0;
-        for (DescriptionElementBase element : elements) {
-            if(element instanceof QuantitativeData){
-                QuantitativeData quantitativeData = (QuantitativeData)element;
-                values.addAll(quantitativeData.getStatisticalValues().stream()
-                .filter(value->value.getType().equals(StatisticalMeasure.EXACT_VALUE()))
-                .map(exact->exact.getValue())
-                .collect(Collectors.toList()));
-                if(quantitativeData.getMin()!=null){
-                    values.add(quantitativeData.getMin());
-                }
-                if(quantitativeData.getMax()!=null){
-                    values.add(quantitativeData.getMax());
-                }
-                sampleSize++;
-            }
-        }
-        aggregate.setSampleSize(sampleSize, null);
-        OptionalDouble min = values.stream().mapToDouble(value->(double)value).min();
-        OptionalDouble max = values.stream().mapToDouble(value->(double)value).max();
-        OptionalDouble avg = values.stream().mapToDouble(value->(double)value).average();
-        if(min.isPresent()){
-            aggregate.setMinimum((float)min.getAsDouble(), null);
-        }
-        if(max.isPresent()){
-            aggregate.setMaximum((float)max.getAsDouble(), null);
-        }
-        if(avg.isPresent()){
-            aggregate.setAverage((float)avg.getAsDouble(), null);
-        }
-        description.addElement(aggregate);
-    }
-
-    private void aggregateCategoricalData(TaxonDescription description, Character character,
-            List<DescriptionElementBase> elements) {
-        CategoricalData aggregate = CategoricalData.NewInstance(character);
-        elements.stream()
-        .filter(element->element instanceof CategoricalData)
-        .flatMap(categoricalData->((CategoricalData)categoricalData).getStateData().stream())
-        .forEach(stateData->aggregate.addStateData((StateData) stateData.clone()));
-        description.addElement(aggregate);
-    }
-
-    private void addCharacterToMap(Map<Character, List<DescriptionElementBase>> featureToElementMap, DescriptionElementBase descriptionElement) {
-        List<DescriptionElementBase> list = featureToElementMap.get(descriptionElement.getFeature());
-        if(list==null){
-            list = new ArrayList<>();
-        }
-        list.add(descriptionElement);
-        featureToElementMap.put(HibernateProxyHelper.deproxy(descriptionElement.getFeature(), Character.class), list);
-    }
-
     @Override
     @Transactional(readOnly=false)
     public DeleteResult removeDescription(UUID descriptionUuid, UUID descriptiveDataSetUuid) {
@@ -676,7 +406,8 @@ public class DescriptiveDataSetService
             result.addDeletedObject(descriptionBase);
             // remove taxon description with IndividualsAssociation from data set
             if(descriptionBase instanceof SpecimenDescription){
-                Set<IndividualsAssociation> associations = (Set<IndividualsAssociation>) dataSet.getDescriptions()
+                @SuppressWarnings("cast")
+                Set<IndividualsAssociation> associations = (Set<IndividualsAssociation>)dataSet.getDescriptions()
                         .stream()
                         .flatMap(desc->desc.getElements().stream())// put all description element in one stream
                         .filter(element->element instanceof IndividualsAssociation)
@@ -696,20 +427,37 @@ public class DescriptiveDataSetService
         return result;
     }
 
+    @Override
+    @Transactional(readOnly = false)
+    public DeleteResult delete(UUID datasetUuid){
+        DescriptiveDataSet dataSet = dao.load(datasetUuid);
+        DeleteResult result = new DeleteResult();
+        if (!dataSet.getDescriptions().isEmpty()){
+            Set<DescriptionBase> descriptions = new HashSet<>();
+            for (DescriptionBase desc: dataSet.getDescriptions()){
+                descriptions.add(desc);
+            }
+            DeleteResult descriptionResult;
+            for (DescriptionBase desc: descriptions){
+                dataSet.removeDescription(desc);
+                descriptionResult = descriptionService.deleteDescription(desc);
+                result.includeResult(descriptionResult);
+            }
+
+
+        }
+        dao.delete(dataSet);
+        result.addDeletedObject(dataSet);
+        return result;
+    }
+
     @Override
     @Transactional(readOnly=false)
     public TaxonRowWrapperDTO createTaxonDescription(UUID dataSetUuid, UUID taxonNodeUuid, DescriptionType descriptionType){
         DescriptiveDataSet dataSet = load(dataSetUuid);
         TaxonNode taxonNode = taxonNodeService.load(taxonNodeUuid, Arrays.asList("taxon"));
         TaxonDescription newTaxonDescription = TaxonDescription.NewInstance(taxonNode.getTaxon());
-        String tag = "";
-        if(descriptionType.equals(DescriptionType.DEFAULT_VALUES_FOR_AGGREGATION)){
-            tag = "[Default]";
-        }
-        else if(descriptionType.equals(DescriptionType.SECONDARY_DATA)){
-            tag = "[Literature]";
-        }
-        newTaxonDescription.setTitleCache(tag+" "+dataSet.getLabel()+": "+newTaxonDescription.generateTitle(), true); //$NON-NLS-2$
+        newTaxonDescription.setTitleCache(dataSet.getLabel()+": "+newTaxonDescription.generateTitle(), true); //$NON-NLS-2$
         newTaxonDescription.getTypes().add(descriptionType);
 
         dataSet.getDescriptiveSystem().getDistinctTerms().forEach(wsFeature->{
@@ -752,7 +500,7 @@ public class DescriptiveDataSetService
             for (DescriptionElementBase specimenDescriptionElement : specimenDescription.getElements()) {
                 Feature feature = specimenDescriptionElement.getFeature();
                 specimenDescriptionFeatures.add(feature);
-                if(datasetFeatures.contains(feature)){
+                if(datasetFeatures.contains(feature) && RowWrapperDTO.hasData(specimenDescriptionElement)){
                     matchingDescriptionElements.add(specimenDescriptionElement);
                 }
             }
@@ -818,24 +566,6 @@ public class DescriptiveDataSetService
             }
         }
 
-        //add all remaining description elements to the new description
-        for(Feature wsFeature:datasetFeatures){
-            boolean featureFound = false;
-            for(DescriptionElementBase element:newDesription.getElements()){
-                if(element.getFeature().equals(wsFeature)){
-                    featureFound = true;
-                    break;
-                }
-            }
-            if(!featureFound){
-                if(wsFeature.isSupportsCategoricalData()){
-                    newDesription.addElement(CategoricalData.NewInstance(wsFeature));
-                }
-                else if(wsFeature.isSupportsQuantitativeData()){
-                    newDesription.addElement(QuantitativeData.NewInstance(wsFeature));
-                }
-            }
-        }
         //add sources of data set
         if(addDatasetSource){
             dataSet.getSources().forEach(source->{
@@ -1027,6 +757,8 @@ public class DescriptiveDataSetService
             return DescriptiveDataSetService.this;
         }
 
+
+
     }
 
 }