Merge branch 'release/5.3.0' 5.3.0
authorjenkins <jenkins-int@bgbm.org>
Mon, 17 Sep 2018 09:14:01 +0000 (11:14 +0200)
committerjenkins <jenkins-int@bgbm.org>
Mon, 17 Sep 2018 09:14:01 +0000 (11:14 +0200)
168 files changed:
cdmlib-cache/pom.xml
cdmlib-cache/src/main/java/eu/etaxonomy/cdm/cache/CacheLoader.java
cdmlib-cache/src/main/java/eu/etaxonomy/cdm/cache/CdmEntityCacheKey.java
cdmlib-cache/src/main/java/eu/etaxonomy/cdm/cache/CdmTransientEntityCacher.java
cdmlib-commons/pom.xml
cdmlib-commons/src/main/java/eu/etaxonomy/cdm/common/ExcelUtils.java
cdmlib-commons/src/main/java/eu/etaxonomy/cdm/common/StreamUtils.java
cdmlib-commons/src/main/java/eu/etaxonomy/cdm/common/UTF8.java
cdmlib-db/pom.xml
cdmlib-ext/pom.xml
cdmlib-ext/src/main/java/eu/etaxonomy/cdm/ext/geo/ShpAttributesToNamedAreaMapper.java
cdmlib-io/pom.xml
cdmlib-io/src/main/java/eu/etaxonomy/cdm/io/cdmLight/CdmLightClassificationExport.java
cdmlib-io/src/main/java/eu/etaxonomy/cdm/io/common/ExportResult.java
cdmlib-io/src/main/java/eu/etaxonomy/cdm/io/csv/caryophyllales/out/CsvNameExport.java
cdmlib-io/src/main/java/eu/etaxonomy/cdm/io/distribution/excelupdate/ExcelDistributionUpdate.java
cdmlib-io/src/main/java/eu/etaxonomy/cdm/io/excel/common/ExcelImportBase.java
cdmlib-io/src/main/java/eu/etaxonomy/cdm/io/excel/common/ExcelImportConfiguratorBase.java
cdmlib-io/src/main/java/eu/etaxonomy/cdm/io/excel/common/ExcelImportState.java
cdmlib-io/src/main/java/eu/etaxonomy/cdm/io/excel/common/ExcelTaxonOrSpecimenImportBase.java
cdmlib-io/src/main/java/eu/etaxonomy/cdm/io/excel/distribution/DistributionImport.java
cdmlib-io/src/main/java/eu/etaxonomy/cdm/io/excel/taxa/TaxonExcelImportBase.java
cdmlib-io/src/main/java/eu/etaxonomy/cdm/io/media/in/MediaExcelImport.java
cdmlib-io/src/main/java/eu/etaxonomy/cdm/io/specimen/SpecimenImportBase.java
cdmlib-io/src/main/java/eu/etaxonomy/cdm/io/specimen/UnitsGatheringEvent.java
cdmlib-io/src/main/java/eu/etaxonomy/cdm/io/specimen/abcd206/in/Abcd206Import.java
cdmlib-io/src/main/java/eu/etaxonomy/cdm/io/specimen/abcd206/in/Abcd206ImportParser.java
cdmlib-io/src/main/java/eu/etaxonomy/cdm/io/specimen/abcd206/in/Abcd206ImportState.java
cdmlib-io/src/main/java/eu/etaxonomy/cdm/io/specimen/abcd206/in/Abcd206XMLFieldGetter.java
cdmlib-io/src/main/java/eu/etaxonomy/cdm/io/specimen/abcd206/in/Identification.java
cdmlib-io/src/main/java/eu/etaxonomy/cdm/io/specimen/abcd206/in/molecular/AbcdDnaParser.java
cdmlib-io/src/main/java/eu/etaxonomy/cdm/io/specimen/abcd206/in/molecular/AbcdGgbnParser.java
cdmlib-io/src/main/java/eu/etaxonomy/cdm/io/specimen/excel/in/ExtensionTypeExcelImport.java
cdmlib-io/src/main/java/eu/etaxonomy/cdm/io/specimen/excel/in/NamedAreaLevelExcelImport.java
cdmlib-io/src/main/java/eu/etaxonomy/cdm/io/specimen/excel/in/SpecimenCdmExcelImport.java
cdmlib-io/src/main/java/eu/etaxonomy/cdm/io/specimen/excel/in/SpecimenSythesysExcelImport.java
cdmlib-io/src/main/java/eu/etaxonomy/cdm/io/tcsxml/in/TcsXmlTaxonRelationsImport.java
cdmlib-io/src/test/java/eu/etaxonomy/cdm/io/sdd/in/SDDImportExportTest.java
cdmlib-model/pom.xml
cdmlib-model/src/main/java/eu/etaxonomy/cdm/exception/FilterException.java [new file with mode: 0644]
cdmlib-model/src/main/java/eu/etaxonomy/cdm/format/CdmFormatterFactory.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/format/taxon/TaxonRelationshipFormatter.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/hibernate/search/AcceptedTaxonBridge.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/hibernate/search/DescriptionBaseClassBridge.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/hibernate/search/UuidBridge.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/model/ICdmCacher.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/model/agent/AgentBase.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/model/common/AnnotatableEntity.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/model/common/ITreeNode.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/model/common/Identifier.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/model/common/OriginalSourceBase.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/model/common/TimePeriod.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/model/common/TreeIndex.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/model/description/FeatureNode.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/model/description/SpecimenDescription.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/model/description/TaxonInteraction.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/model/location/Point.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/model/media/MediaRepresentation.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/model/molecular/Sequence.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/model/name/NomenclaturalStatus.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/model/name/NomenclaturalStatusType.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/model/name/TaxonName.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/model/occurrence/Collection.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/model/occurrence/SpecimenOrObservationBase.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/model/reference/Reference.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/model/taxon/Taxon.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/model/taxon/TaxonBase.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/model/taxon/TaxonNode.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/model/taxon/TaxonRelationshipType.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/strategy/cache/reference/DefaultReferenceCacheStrategy.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/strategy/cache/taxon/TaxonBaseShortSecCacheStrategy.java
cdmlib-model/src/main/java/eu/etaxonomy/cdm/strategy/parser/TimePeriodParser.java
cdmlib-model/src/test/java/eu/etaxonomy/cdm/format/TaxonRelationshipFormatterTest.java
cdmlib-model/src/test/java/eu/etaxonomy/cdm/model/common/TimePeriodTest.java
cdmlib-model/src/test/java/eu/etaxonomy/cdm/model/common/TreeIndexTest.java [new file with mode: 0644]
cdmlib-model/src/test/java/eu/etaxonomy/cdm/strategy/parser/TimePeriodParserTest.java
cdmlib-persistence/pom.xml
cdmlib-persistence/src/main/java/eu/etaxonomy/cdm/common/IoResultBase.java
cdmlib-persistence/src/main/java/eu/etaxonomy/cdm/persistence/dao/common/ICdmEntityDao.java
cdmlib-persistence/src/main/java/eu/etaxonomy/cdm/persistence/dao/common/IIdentifiableDao.java
cdmlib-persistence/src/main/java/eu/etaxonomy/cdm/persistence/dao/common/ISearchableDao.java
cdmlib-persistence/src/main/java/eu/etaxonomy/cdm/persistence/dao/hibernate/common/CdmEntityDaoBase.java
cdmlib-persistence/src/main/java/eu/etaxonomy/cdm/persistence/dao/hibernate/common/IdentifiableDaoBase.java
cdmlib-persistence/src/main/java/eu/etaxonomy/cdm/persistence/dao/hibernate/common/VersionableDaoBase.java
cdmlib-persistence/src/main/java/eu/etaxonomy/cdm/persistence/dao/hibernate/description/DescriptionElementDaoImpl.java
cdmlib-persistence/src/main/java/eu/etaxonomy/cdm/persistence/dao/hibernate/taxon/ClassificationDaoHibernateImpl.java
cdmlib-persistence/src/main/java/eu/etaxonomy/cdm/persistence/dao/hibernate/taxon/TaxonDaoHibernateImpl.java
cdmlib-persistence/src/main/java/eu/etaxonomy/cdm/persistence/dao/hibernate/taxon/TaxonNodeDaoHibernateImpl.java
cdmlib-persistence/src/main/java/eu/etaxonomy/cdm/persistence/dao/taxon/IClassificationDao.java
cdmlib-persistence/src/main/java/eu/etaxonomy/cdm/persistence/dao/taxon/ITaxonDao.java
cdmlib-persistence/src/main/java/eu/etaxonomy/cdm/persistence/dto/MergeResult.java
cdmlib-persistence/src/main/java/eu/etaxonomy/cdm/persistence/hibernate/permission/TargetEntityStates.java
cdmlib-persistence/src/main/java/eu/etaxonomy/cdm/persistence/hibernate/permission/voter/RegistrationVoter.java
cdmlib-persistence/src/test/java/eu/etaxonomy/cdm/persistence/dao/hibernate/taxon/ClassificationDaoHibernateImplTest.java
cdmlib-persistence/src/test/java/eu/etaxonomy/cdm/persistence/dao/hibernate/taxon/TaxonDaoHibernateImplTest.java
cdmlib-persistence/src/test/java/eu/etaxonomy/cdm/persistence/dao/hibernate/taxon/TaxonNodeDaoHibernateImplTest.java
cdmlib-persistence/src/test/java/eu/etaxonomy/cdm/persistence/dao/initializer/AdvancedBeanInitializerTest.java
cdmlib-persistence/src/test/resources/eu/etaxonomy/cdm/persistence/dao/hibernate/taxon/TaxonDaoHibernateImplTest.xml
cdmlib-print/pom.xml
cdmlib-print/src/main/java/eu/etaxonomy/cdm/print/LocalXMLEntityFactory.java
cdmlib-remote-webapp/pom.xml
cdmlib-remote-webapp/src/main/resources/log4j.properties
cdmlib-remote-webapp/src/main/webapp/WEB-INF/datasources/configurable.xml
cdmlib-remote-webapp/src/test/resources/log4j.properties
cdmlib-remote/pom.xml
cdmlib-remote/src/main/java/eu/etaxonomy/cdm/remote/controller/BaseController.java
cdmlib-remote/src/main/java/eu/etaxonomy/cdm/remote/controller/BaseListController.java
cdmlib-remote/src/main/java/eu/etaxonomy/cdm/remote/controller/ClassificationController.java
cdmlib-remote/src/main/java/eu/etaxonomy/cdm/remote/controller/ClassificationPortalListController.java
cdmlib-remote/src/main/java/eu/etaxonomy/cdm/remote/controller/DescriptionElementListController.java
cdmlib-remote/src/main/java/eu/etaxonomy/cdm/remote/controller/HttpStatusMessage.java
cdmlib-remote/src/main/java/eu/etaxonomy/cdm/remote/controller/OccurrenceController.java
cdmlib-remote/src/main/java/eu/etaxonomy/cdm/remote/controller/RegistrationController.java
cdmlib-remote/src/main/java/eu/etaxonomy/cdm/remote/controller/TaxonController.java
cdmlib-remote/src/main/java/eu/etaxonomy/cdm/remote/controller/TaxonListController.java
cdmlib-remote/src/main/java/eu/etaxonomy/cdm/remote/controller/TaxonNodeController.java
cdmlib-remote/src/main/java/eu/etaxonomy/cdm/remote/controller/TaxonPortalController.java
cdmlib-remote/src/main/resources/log4j.properties
cdmlib-remote/src/test/resources/eu/etaxonomy/cdm/applicationContext-test.xml
cdmlib-remote/src/test/resources/log4j.properties
cdmlib-services/pom.xml
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/application/CdmRepository.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/cache/CdmCacher.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/cache/CdmTermCacher.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/config/EhCacheConfiguration.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/ClassificationServiceImpl.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/CollectionServiceImpl.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/DescriptionServiceImpl.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/DescriptiveDataSetService.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/IClassificationService.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/IDescriptionService.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/IDescriptiveDataSetService.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/IIdentifiableEntityService.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/IService.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/ITaxonNodeService.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/ITaxonService.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/IdentifiableServiceBase.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/OccurrenceServiceImpl.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/ServiceBase.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/TaxonNodeServiceImpl.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/TaxonServiceImpl.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/config/FindTaxaAndNamesConfiguratorImpl.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/config/IFindTaxaAndNamesConfigurator.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/config/IIdentifiableEntityServiceConfigurator.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/config/IdentifiableServiceConfiguratorImpl.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/dto/CdmEntityIdentifier.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/dto/CollectionDTO.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/dto/GatheringEventDTO.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/dto/PreservedSpecimenDTO.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/dto/SequenceDTO.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/dto/SourceDTO.java [new file with mode: 0644]
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/dto/TaxonRelationshipsDTO.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/name/TypeDesignationComparator.java [changed mode: 0644->0755]
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/name/TypeDesignationSetManager.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/name/TypedEntityComparator.java [new file with mode: 0644]
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/search/LuceneIndexToolProviderImpl.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/search/QueryFactory.java
cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/util/AbstractRelationshipEdge.java
cdmlib-services/src/test/java/eu/etaxonomy/cdm/api/service/ClassificationServiceImplTest.java
cdmlib-services/src/test/java/eu/etaxonomy/cdm/api/service/NameServiceImplTest.java
cdmlib-services/src/test/java/eu/etaxonomy/cdm/api/service/TaxonServiceSearchTest.java
cdmlib-services/src/test/java/eu/etaxonomy/cdm/api/service/TypeDesignationSetManagerTest.java [new file with mode: 0755]
cdmlib-services/src/test/java/eu/etaxonomy/cdm/api/service/dto/TaxonRelationshipsDTOTest.java
cdmlib-services/src/test/java/eu/etaxonomy/cdm/api/service/search/QueryFactoryTest.java
cdmlib-services/src/test/java/eu/etaxonomy/cdm/test/function/Datasource.java
cdmlib-services/src/test/resources/eu/etaxonomy/cdm/api/service/TaxonServiceSearchTest.xml
cdmlib-test/pom.xml
pom.xml

index 7fdc357ff835171d1172efd7927e659a4a6b54bc..c8b4062fc081accb25947313a6c382ba43dd805b 100644 (file)
@@ -4,7 +4,7 @@
   <parent>
     <groupId>eu.etaxonomy</groupId>
     <artifactId>cdmlib-parent</artifactId>
-    <version>5.2.0</version>
+    <version>5.3.0</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 
index 46468e351fbe46f25e775e8b70114732ca7ffa9d..0fa96af2325b5e4bee98784b7e4e1f4ef5e47bd0 100644 (file)
@@ -28,7 +28,6 @@ import net.sf.ehcache.Element;
 /**
  * @author cmathew
  * @since 19 Feb 2015
- *
  */
 public class CacheLoader {
     private static final Logger logger = Logger.getLogger(CacheLoader.class);
@@ -40,11 +39,9 @@ public class CacheLoader {
     private final Cache cdmlibModelCache;
 
 
-
     public CacheLoader(ICdmCacher cdmCacher) {
         this.cdmCacher = cdmCacher;
         this.cdmlibModelCache = CdmRemoteCacheManager.getInstance().getCdmModelGetMethodsCache();
-
     }
 
 
@@ -69,7 +66,7 @@ public class CacheLoader {
         } else if (obj instanceof Collection) {
             return (T) load((Collection<T>)obj, recursive, update);
         } else if(obj instanceof Pager) {
-               load(((Pager)obj).getRecords(), recursive, update);
+            load(((Pager)obj).getRecords(), recursive, update);
             return obj;
         } else if(obj instanceof MergeResult) {
             return (T) load((MergeResult<CdmBase>)obj, recursive, update);
@@ -93,21 +90,19 @@ public class CacheLoader {
             return (T) loadRecursive((MergeResult)obj, alreadyVisitedEntities, update);
         }
 
-
-        logger.info("No caching yet for type " + obj.getClass().getName());
+        if (logger.isInfoEnabled()){logger.info("No caching yet for type " + obj.getClass().getName());}
 
         return obj;
     }
 
     public <T extends Object> Map<T,T> load(Map<T,T> map, boolean recursive, boolean update){
 
-
         if(isRecursiveEnabled && recursive) {
-            logger.debug("---- starting recursive load for cdm entity map");
-            List<Object> alreadyVisitedEntities = new ArrayList<Object>();
+            if (logger.isDebugEnabled()){logger.debug("---- starting recursive load for cdm entity map");}
+            List<Object> alreadyVisitedEntities = new ArrayList<>();
             Map<T,T> cachedMap = load(map, alreadyVisitedEntities, update);
             alreadyVisitedEntities.clear();
-            logger.debug("---- ending recursive load for cdm entity map \n");
+            if (logger.isDebugEnabled()){logger.debug("---- ending recursive load for cdm entity map \n");}
             return cachedMap;
         } else {
             return load(map, null, update);
@@ -116,18 +111,18 @@ public class CacheLoader {
 
 
     private <T extends Object> Map<T,T> load(Map<T,T> map, List<Object> alreadyVisitedEntities, boolean update){
-        //map = (Map<T,T>)deproxy(map);
 
         if(map == null || map.isEmpty()) {
             return map;
         }
 
-        int originalMapSize = map.size();
         Object[] result = new Object[ map.size() * 2 ];
         Iterator<Map.Entry<T,T>> iter = map.entrySet().iterator();
         int i=0;
         // to avoid ConcurrentModificationException
-        alreadyVisitedEntities.add(map);
+        if (alreadyVisitedEntities != null){
+            alreadyVisitedEntities.add(map);
+        }
         while ( iter.hasNext() ) {
             Map.Entry<T,T> e = iter.next();
             result[i++] = e.getKey();
@@ -155,11 +150,11 @@ public class CacheLoader {
 
         Collection<T> loadedCollection;
         if(isRecursiveEnabled && recursive) {
-            logger.debug("---- starting recursive load for cdm entity collection");
-            List<Object> alreadyVisitedEntities = new ArrayList<Object>();
+            if (logger.isDebugEnabled()){logger.debug("---- starting recursive load for cdm entity collection");}
+            List<Object> alreadyVisitedEntities = new ArrayList<>();
             Collection<T> cachedCollection = load(collection, alreadyVisitedEntities, update);
             alreadyVisitedEntities.clear();
-            logger.debug("---- ending recursive load for cdm entity collection \n");
+            if (logger.isDebugEnabled()){logger.debug("---- ending recursive load for cdm entity collection \n");}
             loadedCollection = cachedCollection;
         } else {
             loadedCollection = load(collection, null, update);
@@ -170,8 +165,6 @@ public class CacheLoader {
     @SuppressWarnings("unchecked")
     private <T extends Object> Collection<T> load(Collection<T> collection, List<Object> alreadyVisitedEntities, boolean update) {
 
-
-
         if(collection == null || collection.isEmpty()) {
             return collection;
         }
@@ -180,7 +173,9 @@ public class CacheLoader {
         Iterator<T> collectionItr = collection.iterator();
         int count = 0;
         // to avoid ConcurrentModificationException
-        alreadyVisitedEntities.add(collection);
+        if (alreadyVisitedEntities != null){
+            alreadyVisitedEntities.add(collection);
+        }
         while(collectionItr.hasNext()) {
             Object obj = collectionItr.next();
             if(alreadyVisitedEntities == null) {
@@ -205,13 +200,13 @@ public class CacheLoader {
     public MergeResult<CdmBase> load(MergeResult<CdmBase> mergeResult, boolean recursive, boolean update) {
         CdmBase cdmBase = load(mergeResult.getMergedEntity(), recursive, update);
         load(mergeResult.getNewEntities(), recursive, update);
-        return new MergeResult(cdmBase, mergeResult.getNewEntities());
+        return new MergeResult<>(cdmBase, mergeResult.getNewEntities());
     }
 
     public MergeResult<CdmBase> loadRecursive(MergeResult<CdmBase> mergeResult,List<Object> alreadyVisitedEntities, boolean update) {
         CdmBase cdmBase = loadRecursive(mergeResult.getMergedEntity(), alreadyVisitedEntities, update);
         loadRecursive(mergeResult.getNewEntities(), alreadyVisitedEntities, update);
-        return new MergeResult(cdmBase, mergeResult.getNewEntities());
+        return new MergeResult<>(cdmBase, mergeResult.getNewEntities());
     }
 
     /**
@@ -252,23 +247,23 @@ public class CacheLoader {
             }
         }
 
-        CdmBase loadedCdmBase;
+        T loadedCdmBase;
         if(isRecursiveEnabled && recursive) {
             logger.debug("---- starting recursive load for cdm entity " + cdmEntity.getClass().getName() + " with id " + cdmEntity.getId());
             List<Object> alreadyVisitedEntities = new ArrayList<Object>();
-            CdmBase cb =  loadRecursive(cdmEntity, alreadyVisitedEntities, update);
+            T cb =  loadRecursive(cdmEntity, alreadyVisitedEntities, update);
             alreadyVisitedEntities.clear();
             logger.debug("---- ending recursive load for cdm entity " + cdmEntity.getClass().getName() + " with id " + cdmEntity.getId() + "\n");
             loadedCdmBase =  cb;
         } else {
             loadedCdmBase = load(cdmEntity);
         }
-        return (T) loadedCdmBase;
+        return loadedCdmBase;
 
     }
 
 
-    protected CdmBase load(CdmBase cdmEntity) {
+    protected <T extends CdmBase> T load(T cdmEntity) {
         logger.debug("loading object of type " + cdmEntity.getClass().getName() + " with id " + cdmEntity.getId());
         cdmCacher.put((CdmBase)ProxyUtils.deproxy(cdmEntity));
         return cdmCacher.getFromCache(cdmEntity);
@@ -293,21 +288,22 @@ public class CacheLoader {
      *            them to the value of the cdm entity being loaded
      * @return
      */
-    private CdmBase loadRecursive(CdmBase cdmEntity,  List<Object> alreadyVisitedEntities, boolean update) {
+    private <T extends CdmBase> T loadRecursive(T cdmEntity,  List<Object> alreadyVisitedEntities, boolean update) {
 
-        CdmBase cachedCdmEntity = load(cdmEntity);
+        T cachedCdmEntity = load(cdmEntity);
 
         // we want to recursive through the cdmEntity (and not the cachedCdmEntity)
         // since there could be new or deleted objects in the cdmEntity sub-graph
 
         // start by getting the fields from the cdm entity
-        CdmBase deproxiedEntity = (CdmBase)ProxyUtils.deproxyOrNull(cdmEntity);
+        //TODO improve generics for deproxyOrNull, probably need to split the method
+        T deproxiedEntity = (T)ProxyUtils.deproxyOrNull(cdmEntity);
         if(deproxiedEntity != null){
             String className = deproxiedEntity.getClass().getName();
-            CdmModelFieldPropertyFromClass cmgmfc = getFromCdmlibModelCache(className);
-            if(cmgmfc != null) {
+            CdmModelFieldPropertyFromClass cmfpfc = getFromCdmlibModelCache(className);
+            if(cmfpfc != null) {
                 alreadyVisitedEntities.add(cdmEntity);
-                List<String> fields = cmgmfc.getFields();
+                List<String> fields = cmfpfc.getFields();
                 for(String field : fields) {
                     // retrieve the actual object corresponding to the field.
                     // this object will be either a CdmBase or a Collection / Map
@@ -328,7 +324,7 @@ public class CacheLoader {
                 throw new CdmClientCacheException("CdmEntity with class " + cdmEntity.getClass().getName() + " is not found in the cdmlib model cache. " +
                         "The cache may be corrupted or not in sync with the latest model version" );
             }
-        } else {
+        } else { //deproxiedEntity == null
             logger.debug("ignoring uninitlialized proxy " + cdmEntity.getClass() + "#" + cdmEntity.getId());
         }
 
index 9c18fbc6ca30a870cfc140d7ae5d7af20f5411e1..4025bbb2aaac30375650537764616f8185c04c19 100644 (file)
@@ -2,27 +2,32 @@ package eu.etaxonomy.cdm.cache;
 
 import eu.etaxonomy.cdm.model.common.CdmBase;
 
-public class CdmEntityCacheKey {
+public class CdmEntityCacheKey<T extends CdmBase> {
 
-       private Class<? extends CdmBase> persistenceClass;
+       private Class<T> persistenceClass;
        private int persistenceId;
-       
-       public CdmEntityCacheKey(CdmBase cdmBase) {
-               this.persistenceClass = cdmBase.getClass();
+
+
+       public CdmEntityCacheKey(T cdmBase) {
+               this.persistenceClass = (Class<T>)cdmBase.getClass();
                this.persistenceId = cdmBase.getId();
        }
-       
-       public CdmEntityCacheKey(Class<? extends CdmBase> clazz, int id) {
+
+       /**
+        * @param clazz
+        * @param uuid
+        */
+       public CdmEntityCacheKey(Class<T> clazz, int id) {
                this.persistenceClass = clazz;
                this.persistenceId = id;
        }
-       
 
-       
-       public Class<? extends CdmBase> getPersistenceClass() {
+
+
+       public Class<? extends T> getPersistenceClass() {
                return persistenceClass;
        }
-       
+
        public int getPersistenceId() {
                return persistenceId;
        }
@@ -31,23 +36,24 @@ public class CdmEntityCacheKey {
                if(obj == null || !(obj instanceof CdmEntityCacheKey)) {
                        return false;
                }
-               
+
                if(this == obj) {
                        return true;
                }
-               CdmEntityCacheKey that = (CdmEntityCacheKey) obj;
-               if(this.persistenceClass.equals(that.persistenceClass) && this.persistenceId == that.persistenceId) {
+               CdmEntityCacheKey<?> that = (CdmEntityCacheKey<?>) obj;
+               if(this.persistenceClass.equals(that.persistenceClass)
+                       && this.persistenceId == that.persistenceId) {
                        return true;
                }
-               
+
                return false;
        }
-       
+
        @Override
        public int hashCode() {
                return (this.persistenceClass.getName() + String.valueOf(this.persistenceId)).hashCode();
        }
-       
+
        @Override
        public String toString() {
                return this.persistenceClass.getName() + String.valueOf(this.persistenceId);
index 04d14c7adf8da414db342bf93829f09c2eb48ba4..fcee6163f7a97f249efb36b76fa43af35898bd9f 100644 (file)
@@ -8,6 +8,7 @@
  */
 package eu.etaxonomy.cdm.cache;
 
+import java.lang.management.ManagementFactory;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
@@ -15,6 +16,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.UUID;
 
+import javax.management.MBeanServer;
+
 import org.apache.log4j.Logger;
 
 import eu.etaxonomy.cdm.api.cache.CdmCacher;
@@ -27,9 +30,14 @@ import net.sf.ehcache.Element;
 import net.sf.ehcache.Status;
 import net.sf.ehcache.config.CacheConfiguration;
 import net.sf.ehcache.config.SizeOfPolicyConfiguration;
+import net.sf.ehcache.management.ManagementService;
 import net.sf.ehcache.statistics.LiveCacheStatistics;
 
 /**
+ * This cache handle transient (id>0) and volatile (id=0) CdmBase object.
+ * Volatile objects need to be added via {@link #addNewEntity(CdmBase)}
+ * and there id is updated as soon as a transient object with same
+ * uuid is added to the cacher.
  *
  * This cache guarantees that
  *  - all objects put will be ancestors of CdmBase
@@ -42,42 +50,37 @@ import net.sf.ehcache.statistics.LiveCacheStatistics;
  * @since 14 Oct 2014
  *
  */
-
 public class CdmTransientEntityCacher implements ICdmCacher {
 
     private static final Logger logger = Logger.getLogger(CdmTransientEntityCacher.class);
 
-
-    // removed since unused ########################
-    // private final eu.etaxonomy.cdm.session.ICdmEntitySessionManager cdmEntitySessionManager;
-
-    /**
-     * permanent cache which is usually used to cache terms permanently
-     * FIXME rename to permanent cache
-     */
-    private static CdmCacher cdmCacher;
-
+    //the key for this cacher within the CacheManager
     private final String cacheId;
 
+    //the cache
     private final Cache cache;
 
+    //permanent cache which is usually used to cache terms permanently
+    private static CdmCacher permanentCache;
+
     private final CacheLoader cacheLoader;
 
-    private final Map<UUID, CdmBase> newEntitiesMap = new HashMap<UUID, CdmBase>();
+    //map for volatile entities (id=0)
+    private final Map<UUID, CdmBase> newEntitiesMap = new HashMap<>();
+
+    private static volatile boolean managementBeansConfigured = false;
+
+// ********************* CONSTRUCTOR **********************************/
 
     public CdmTransientEntityCacher(String cacheId) {
         this.cacheId = cacheId;
 
         cache = new Cache(getEntityCacheConfiguration(cacheId));
 
-        CacheManager.create().removeCache(cache.getName());
-        CacheManager.create().addCache(cache);
-
-        // removed since unused ########################
-        // this.cdmEntitySessionManager = cdmEntitySessionManager;
+        createCacheManager().removeCache(cache.getName());
+        createCacheManager().addCache(cache);
 
         cacheLoader = new CacheLoader(this);
-
     }
 
     public CdmTransientEntityCacher(Object sessionOwner) {
@@ -88,6 +91,8 @@ public class CdmTransientEntityCacher implements ICdmCacher {
         return sessionOwner.getClass().getName() +  String.valueOf(sessionOwner.hashCode());
     }
 
+//****************************** METHODS *********************************/
+
     /**
      * Returns the default cache configuration.
      *
@@ -99,15 +104,15 @@ public class CdmTransientEntityCacher implements ICdmCacher {
         sizeOfConfig.setMaxDepthExceededBehavior("abort");
 
         return new CacheConfiguration(cacheId, 0)
-        .eternal(true)
-        .statistics(true)
-        .sizeOfPolicy(sizeOfConfig)
-        .overflowToOffHeap(false);
+            .eternal(true)
+            .statistics(true)
+            .sizeOfPolicy(sizeOfConfig)
+            .overflowToOffHeap(false);
 
     }
 
-    public static void setDefaultCacher(CdmCacher css) {
-        cdmCacher = css;
+    public static void setPermanentCacher(CdmCacher permanentCacher) {
+        permanentCache = permanentCacher;
     }
 
     public LiveCacheStatistics getCacheStatistics() {
@@ -125,7 +130,27 @@ public class CdmTransientEntityCacher implements ICdmCacher {
      * @return
      */
     private Cache getCache() {
-        return  CacheManager.create().getCache(cacheId);
+        return  createCacheManager().getCache(cacheId);
+    }
+
+    /**
+     * @return
+     */
+    protected CacheManager createCacheManager() {
+
+        CacheManager cacheManager = CacheManager.create();
+
+        if(!managementBeansConfigured){
+            MBeanServer mBeanServer = ManagementFactory.getPlatformMBeanServer();
+            boolean registerCacheManager = false;
+            boolean registerCaches = true;
+            boolean registerCacheConfigurations = false;
+            boolean registerCacheStatistics = true;
+            ManagementService.registerMBeans(cacheManager, mBeanServer, registerCacheManager, registerCaches, registerCacheConfigurations, registerCacheStatistics);
+            managementBeansConfigured = true;
+        }
+
+        return cacheManager;
     }
 
     public <T extends Object> T load(T obj, boolean update) {
@@ -218,21 +243,22 @@ public class CdmTransientEntityCacher implements ICdmCacher {
     }
 
     /**
-     * Puts the passed <code>cdmEntity</code> into the cache as long it does not yet exist in the caches.
+     * Puts the passed <code>cdmEntity</code> into the cache as long it does
+     * not yet exist in the caches.
      * <p>
      * The adjacent <b>ENTITY GRAPH WILL NOT BE LOADED RECURSIVELY</b>
      */
     @Override
     public void put(CdmBase cdmEntity) {
 
-        CdmBase cachedCdmEntity = cdmCacher.load(cdmEntity);
+        CdmBase cachedCdmEntity = permanentCache.load(cdmEntity);
         if(cachedCdmEntity != null) {
             logger.info("Cdm Entity with id : " + cdmEntity.getId() + " already exists in permanent cache. Ignoring put.");
             return;
         }
-        CdmEntityCacheKey id = new CdmEntityCacheKey(cdmEntity);
+        CdmEntityCacheKey<?> key = new CdmEntityCacheKey<>(cdmEntity);
 
-        cachedCdmEntity = getFromCache(id);
+        cachedCdmEntity = getFromCache(key);
         if(cachedCdmEntity == null) {
             CdmBase cdmEntityToCache = cdmEntity;
             CdmBase newEntity = newEntitiesMap.get(cdmEntity.getUuid());
@@ -240,49 +266,52 @@ public class CdmTransientEntityCacher implements ICdmCacher {
                 newEntity.setId(cdmEntity.getId());
                 cdmEntityToCache = newEntity;
             }
-            getCache().put(new Element(id, cdmEntityToCache));
+            getCache().put(new Element(key, cdmEntityToCache));
             cdmEntityToCache.initListener();
             newEntitiesMap.remove(cdmEntity.getUuid());
-            logger.debug(" - object of type " + cdmEntityToCache.getClass().getName() + " with id " + cdmEntityToCache.getId() + " put in cache");
+            if (logger.isDebugEnabled()){logger.debug(" - object of type " + cdmEntityToCache.getClass().getName() + " with id " + cdmEntityToCache.getId() + " put in cache");}
             return;
         }
         logger.debug(" - object of type " + cdmEntity.getClass().getName() + " with id " + cdmEntity.getId() + " already exists");
     }
 
 
-    private Element getCacheElement(CdmEntityCacheKey key) {
+    private Element getCacheElement(CdmEntityCacheKey<?> key) {
         return getCache().get(key);
     }
 
 
-    public CdmBase getFromCache(CdmEntityCacheKey id) {
+    public <T extends CdmBase> T getFromCache(CdmEntityCacheKey<T> id) {
         Element e = getCacheElement(id);
 
         if (e == null) {
             return null;
         } else {
-            return (CdmBase) e.getObjectValue();
+            @SuppressWarnings("unchecked")
+            T result = (T) e.getObjectValue();
+            return result;
         }
     }
 
-    public CdmBase getFromCache(Class<? extends CdmBase> clazz, int id) {
-        CdmEntityCacheKey cacheId = generateKey(clazz,id);
+    public <T extends CdmBase> T getFromCache(Class<T> clazz, int id) {
+        CdmEntityCacheKey<T> cacheId = generateKey(clazz, id);
         return getFromCache(cacheId);
     }
 
     @Override
     public <T extends CdmBase> T getFromCache(T cdmBase) {
 
-        CdmEntityCacheKey cacheId = generateKey((CdmBase)ProxyUtils.deproxy(cdmBase));
+        CdmEntityCacheKey<T> cacheId = generateKey((T)ProxyUtils.deproxy(cdmBase));
         // first try this cache
-        CdmBase  cachedCdmEntity = getFromCache(cacheId);
+        T  cachedCdmEntity = getFromCache(cacheId);
 
         if(cachedCdmEntity == null) {
             // ... then try the permanent cache
-            cachedCdmEntity = cdmCacher.getFromCache(cdmBase.getUuid());
+            //TODO also use generics and clazz parameter for getFromCache(uuid)
+            cachedCdmEntity = (T)permanentCache.getFromCache(cdmBase.getUuid());
         }
 
-        return (T) cachedCdmEntity;
+        return cachedCdmEntity;
     }
 
     public CdmBase getFromCache(CdmBase cdmBase, Class<? extends CdmBase> clazz) {
@@ -292,7 +321,7 @@ public class CdmTransientEntityCacher implements ICdmCacher {
     }
 
     public List<CdmBase> getAllEntities() {
-        List<CdmBase> entities = new ArrayList<CdmBase>();
+        List<CdmBase> entities = new ArrayList<>();
         Map<String, CdmBase> elementsMap = getCache().getAllWithLoader(getCache().getKeys(), null);
         for (Map.Entry<String, CdmBase> entry : elementsMap.entrySet()) {
             entities.add(entry.getValue());
@@ -300,11 +329,11 @@ public class CdmTransientEntityCacher implements ICdmCacher {
         return entities;
     }
 
-    public boolean exists(CdmEntityCacheKey key) {
+    public boolean exists(CdmEntityCacheKey<?> key) {
         return (getCacheElement(key) != null);
     }
 
-    public boolean existsAndIsNotNull(CdmEntityCacheKey id) {
+    public boolean existsAndIsNotNull(CdmEntityCacheKey<?> id) {
         return getFromCache(id) != null;
     }
 
@@ -313,22 +342,20 @@ public class CdmTransientEntityCacher implements ICdmCacher {
     }
 
     public void dispose() {
-        CacheManager.create().removeCache(cache.getName());
+        createCacheManager().removeCache(cache.getName());
         cache.dispose();
         newEntitiesMap.clear();
-
     }
 
 
-    public static CdmEntityCacheKey generateKey(Class<? extends CdmBase> clazz, int id) {
-        return new CdmEntityCacheKey(clazz, id);
+    public static <T extends CdmBase> CdmEntityCacheKey<T> generateKey(Class<T> clazz, int id) {
+        return new CdmEntityCacheKey<T>(clazz, id);
     }
 
 
-    public static CdmEntityCacheKey generateKey(CdmBase cdmBase) {
-        Class<? extends CdmBase> entityClass = cdmBase.getClass();
-        int id = cdmBase.getId();
-        return new CdmEntityCacheKey(entityClass, id);
+    public static <T extends CdmBase> CdmEntityCacheKey<T> generateKey(T cdmBase) {
+        Class<T> entityClass = (Class<T>)cdmBase.getClass();
+        return new CdmEntityCacheKey<T>(entityClass, cdmBase.getId());
     }
 
     @Override
index 0b865f5091e21b667dcef2d18d7338b62852cc58..f7fe1b05c4c093baa52f59e7d99ca4b8cc5d697f 100644 (file)
@@ -6,7 +6,7 @@
   <parent>
     <groupId>eu.etaxonomy</groupId>
     <artifactId>cdmlib-parent</artifactId>
-    <version>5.2.0</version>
+    <version>5.3.0</version>
        <relativePath>../pom.xml</relativePath>
   </parent>
   
index a493a7b06e54533f36e77d839e92058b6cddefb5..ac0dd6645bef0e217ca1322e625bc22258f2df6a 100644 (file)
@@ -17,7 +17,9 @@ import java.text.Format;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Locale;
+import java.util.Map;
 
 import org.apache.log4j.Logger;
 import org.apache.poi.hssf.usermodel.HSSFCell;
@@ -37,40 +39,38 @@ public class ExcelUtils {
        private static final Logger logger = Logger.getLogger(ExcelUtils.class);
 
     /** Reads all rows of an Excel worksheet */
-    public static ArrayList<HashMap<String, String>> parseXLS(URI uri) throws FileNotFoundException {
+    public static List<Map<String, String>> parseXLS(URI uri) throws FileNotFoundException {
        return parseXLS(uri, null);
     }
 
 
        /** Reads all rows of an Excel worksheet */
-    public static ArrayList<HashMap<String, String>> parseXLS(URI uri, String worksheetName) throws FileNotFoundException {
-        InputStream stream;
+    public static List<Map<String, String>> parseXLS(URI uri, String worksheetName) throws FileNotFoundException {
         try {
-            stream = UriUtils.getInputStream(uri);
+            InputStream stream = UriUtils.getInputStream(uri);
             return parseXLS(stream, worksheetName);
         } catch(FileNotFoundException fne) {
             throw new FileNotFoundException(uri.toString());
         } catch(Exception ioe) {
-            logger.error("Error reading the Excel file." + uri.toString());
+            String message = "Error reading the Excel file." + uri.toString();
+            logger.error(message);
             ioe.printStackTrace();
+            throw new RuntimeException(message);
         }
-        return null;
 
     }
-        /** Reads all rows of an Excel worksheet */
-        public static ArrayList<HashMap<String, String>> parseXLS(InputStream stream, String worksheetName) throws FileNotFoundException {
 
+    /** Reads all rows of an Excel worksheet */
+    public static List<Map<String, String>> parseXLS(InputStream stream, String worksheetName) {
 
-       ArrayList<HashMap<String, String>> recordList = new ArrayList<HashMap<String, String>>();
+       List<Map<String, String>> recordList = new ArrayList<>();
 
        try {
 //             POIFSFileSystem fs = new POIFSFileSystem(UriUtils.getInputStream(uri));
 //             HSSFWorkbook wb = new HSSFWorkbook(fs);
 
-
                Workbook wb = WorkbookFactory.create(stream);
 
-
                Sheet sheet;
                if (worksheetName == null){
                        sheet = wb.getSheetAt(0);
@@ -104,15 +104,17 @@ public class ExcelUtils {
                                }
                        }
 
-
                        //first row
-                       ArrayList<String> columns = new ArrayList<String>();
+                       List<String> columns = new ArrayList<>();
                        row = sheet.getRow(0);
                        for (int c = 0; c < cols; c++){
                                cell = row.getCell(c);
                                        if(cell != null) {
-                                               columns.add(cell.toString());
-                                               if(logger.isDebugEnabled()) { logger.debug("Cell #" + c + ": " + cell.toString()); }
+                                           String str = cell.toString();
+                                           str = (str == null)? null : str.trim();
+                                           //TODO better make case sensitive, but need to adapt all existing imports for this
+                                               columns.add(str);
+                                               if(logger.isDebugEnabled()) { logger.debug("Cell #" + c + ": " + str); }
                                        } else {
                                                if(logger.isDebugEnabled()) { logger.debug("Cell #" + c + " is null"); }
                                        }
@@ -121,7 +123,7 @@ public class ExcelUtils {
                        //value rows
                        for(int r = 1; r < rows; r++) {
                                row = sheet.getRow(r);
-                               HashMap<String, String> headers = new HashMap<String, String>();
+                               Map<String, String> headers = new HashMap<>();
                                boolean notEmpty = checkIsEmptyRow(row);
                                if(notEmpty) {
                                        for(int c = 0; c < cols; c++) {
index 6b119fdd9d900c10e1800bcd2cef6e120189a9ba..f937c44cdf7f23fb44306b8b3f85601e3042633b 100644 (file)
@@ -1,17 +1,21 @@
 /**\r
 * Copyright (C) 2009 EDIT\r
-* European Distributed Institute of Taxonomy \r
+* European Distributed Institute of Taxonomy\r
 * http://www.e-taxonomy.eu\r
-* \r
+*\r
 * The contents of this file are subject to the Mozilla Public License Version 1.1\r
 * See LICENSE.TXT at the top of this package for the full license terms.\r
 */\r
 package eu.etaxonomy.cdm.common;\r
 \r
+import java.io.File;\r
+import java.io.FileOutputStream;\r
 import java.io.IOException;\r
 import java.io.InputStream;\r
 import java.io.InputStreamReader;\r
 import java.io.StringBufferInputStream;\r
+import java.net.HttpURLConnection;\r
+import java.net.URL;\r
 \r
 import org.apache.log4j.Logger;\r
 \r
@@ -21,54 +25,55 @@ import org.apache.log4j.Logger;
  *\r
  */\r
 public class StreamUtils {\r
-       \r
+\r
        public static final Logger logger = Logger.getLogger(StreamUtils.class);\r
-       \r
+       private static final int BUFFER_SIZE = 4096;\r
+\r
        /**\r
-        * Replaces each substring of this stream that matches the literal search sequence with the specified literal replace sequence. \r
+        * Replaces each substring of this stream that matches the literal search sequence with the specified literal replace sequence.\r
         * The replacement proceeds from the beginning of the stream to the end, for example, replacing "aa" with "b" in the string "aaa" will result in "ba" rather than "ab".\r
-        * \r
-        * @param stream \r
+        *\r
+        * @param stream\r
         * @param search The sequence of char values to be replaced\r
         * @param replace The replacement sequence of char values\r
         * @return\r
         * @throws IOException\r
-        * \r
+        *\r
         */\r
        public static InputStream streamReplace(InputStream stream, String search,      String replace) throws IOException {\r
                InputStreamReader reader = new InputStreamReader(stream);\r
                StringBuilder strBuilder = new StringBuilder();\r
-               \r
+\r
                char[] cbuf = new char[1024];\r
                int charsRead = -1;\r
                while ((charsRead = reader.read(cbuf)) > -1){\r
                        strBuilder.append(cbuf, 0, charsRead);\r
                }\r
-               String replacedContent = strBuilder.toString().replace(search, replace);        \r
+               String replacedContent = strBuilder.toString().replace(search, replace);\r
                StringBufferInputStream replacedStream = new StringBufferInputStream(replacedContent); //TODO replace with StringReader\r
                logger.debug(replacedContent);\r
                return replacedStream;\r
        }\r
-       \r
+\r
        public static InputStream streamReplaceAll(InputStream stream, String regex, String replace) throws IOException {\r
                InputStreamReader reader = new InputStreamReader(stream);\r
                StringBuilder strBuilder = new StringBuilder();\r
-               \r
+\r
                char[] cbuf = new char[1024];\r
                int charsRead = -1;\r
                while ((charsRead = reader.read(cbuf)) > -1){\r
                        strBuilder.append(cbuf, 0, charsRead);\r
                }\r
-               String replacedContent = strBuilder.toString().replaceAll(regex, replace);      \r
+               String replacedContent = strBuilder.toString().replaceAll(regex, replace);\r
                StringBufferInputStream replacedStream = new StringBufferInputStream(replacedContent); //TODO replace with StringReader\r
                logger.debug(replacedContent);\r
                return replacedStream;\r
        }\r
-       \r
+\r
        public static String readToString(InputStream stream) throws IOException {\r
                InputStreamReader reader = new InputStreamReader(stream);\r
                StringBuilder strBuilder = new StringBuilder();\r
-               \r
+\r
                char[] cbuf = new char[1024];\r
                int charsRead = -1;\r
                while ((charsRead = reader.read(cbuf)) > -1){\r
@@ -77,4 +82,52 @@ public class StreamUtils {
                return strBuilder.toString();\r
        }\r
 \r
+       public static void downloadFile(URL url, String saveDir)\r
+            throws IOException {\r
+\r
+        HttpURLConnection httpConn = (HttpURLConnection) url.openConnection();\r
+        int responseCode = httpConn.getResponseCode();\r
+\r
+        // always check HTTP response code first\r
+        if (responseCode == HttpURLConnection.HTTP_OK) {\r
+            String fileName = "";\r
+            String disposition = httpConn.getHeaderField("Content-Disposition");\r
+\r
+            if (disposition != null) {\r
+                // extracts file name from header field\r
+                int index = disposition.indexOf("filename=");\r
+                if (index > 0) {\r
+                    fileName = disposition.substring(index + 10,\r
+                            disposition.length() - 1);\r
+                }\r
+            } else {\r
+                // extracts file name from URL\r
+                fileName = url.getFile().toString().substring(url.getFile().lastIndexOf("/") + 1,\r
+                        url.getFile().length());\r
+            }\r
+\r
+            // opens input stream from the HTTP connection\r
+            InputStream inputStream = httpConn.getInputStream();\r
+            String saveFilePath = saveDir + File.separator + fileName;\r
+\r
+            // opens an output stream to save into file\r
+            FileOutputStream outputStream = new FileOutputStream(saveFilePath);\r
+\r
+            int bytesRead = -1;\r
+            byte[] buffer = new byte[BUFFER_SIZE];\r
+            while ((bytesRead = inputStream.read(buffer)) != -1) {\r
+                outputStream.write(buffer, 0, bytesRead);\r
+            }\r
+\r
+            outputStream.close();\r
+            inputStream.close();\r
+\r
+\r
+        } else {\r
+           logger.error("No file to download. Server replied HTTP code: " + responseCode);\r
+        }\r
+        httpConn.disconnect();\r
+    }\r
+\r
+\r
 }\r
index ffc430e7fdee96dd3db2a086ec38972f923908e5..b5dfb58c6ed9c17a013f45e0c447b5aa8be6ada1 100644 (file)
@@ -32,6 +32,7 @@ public enum UTF8 {
        ACUTE_ACCENT("\u00B4"),     //Acute Accent, looks a bit similar to th single quotation mark
        BLACK_CIRCLE("\u25CF"),       //Black circle, symbol for endemic
        DEGREE_SIGN("\u00B0"),      //°
+       NARROW_NO_BREAK("\u202F")
        ;
 
        private String value;
index 834ed20004de010ef2184ed6d5d614624c880b49..a3f74f4f4e10682c86d72662479d45790994579f 100644 (file)
@@ -3,7 +3,7 @@
   <parent>
     <groupId>eu.etaxonomy</groupId>
     <artifactId>cdmlib-parent</artifactId>
-    <version>5.2.0</version>
+    <version>5.3.0</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 
index 58caec723c6a7bf6d561ad47710aace66bbca06c..89c5328f6295844c56fc27aed325bcfde105b6ac 100644 (file)
@@ -6,7 +6,7 @@
        <parent>
            <groupId>eu.etaxonomy</groupId>
            <artifactId>cdmlib-parent</artifactId>
-           <version>5.2.0</version>
+           <version>5.3.0</version>
            <relativePath>../pom.xml</relativePath>
        </parent>
 
index 07e66bb789c7d28587445059543c217c286c0572..d0797b8fb35c4ddaf3b558b3315be46d13b36701 100644 (file)
@@ -8,7 +8,6 @@
  */
 package eu.etaxonomy.cdm.ext.geo;
 
-import java.io.File;
 import java.io.IOException;
 import java.io.Reader;
 import java.util.HashMap;
@@ -17,7 +16,6 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.commons.lang.ArrayUtils;
-import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
 
 import au.com.bytecode.opencsv.CSVReader;
@@ -82,7 +80,7 @@ public class ShpAttributesToNamedAreaMapper {
      */
     public Map<NamedArea, String> readCsv(Reader reader, List<String> idSearchFields, String wmsLayerName) throws IOException {
 
-        logger.setLevel(Level.DEBUG);
+        //logger.setLevel(Level.DEBUG);
 
         Map<NamedArea, String> resultMap = new HashMap<>(areas.size());
 
index c31b695178205ae5735bd15bb600c4f7499afde2..1e425a0d57b2c5aff1ba24a56bc49b6efa75b907 100644 (file)
@@ -4,7 +4,7 @@
   <parent>
     <groupId>eu.etaxonomy</groupId>
     <artifactId>cdmlib-parent</artifactId>
-    <version>5.2.0</version>
+    <version>5.3.0</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 
index ffb7b3519779798e835ffa34c4e449f406abffd0..00cf9265e6f8c133e118659858869390eff867bb 100755 (executable)
@@ -20,6 +20,7 @@ import java.util.Set;
 import org.apache.commons.lang3.StringUtils;
 import org.springframework.stereotype.Component;
 
+import eu.etaxonomy.cdm.api.service.name.TypeDesignationSetManager;
 import eu.etaxonomy.cdm.common.CdmUtils;
 import eu.etaxonomy.cdm.common.monitor.IProgressMonitor;
 import eu.etaxonomy.cdm.filter.TaxonNodeFilter;
@@ -59,7 +60,6 @@ import eu.etaxonomy.cdm.model.media.MediaRepresentation;
 import eu.etaxonomy.cdm.model.media.MediaRepresentationPart;
 import eu.etaxonomy.cdm.model.name.HomotypicalGroup;
 import eu.etaxonomy.cdm.model.name.HomotypicalGroupNameComparator;
-import eu.etaxonomy.cdm.model.name.NameTypeDesignation;
 import eu.etaxonomy.cdm.model.name.NomenclaturalStatus;
 import eu.etaxonomy.cdm.model.name.Rank;
 import eu.etaxonomy.cdm.model.name.SpecimenTypeDesignation;
@@ -78,6 +78,9 @@ import eu.etaxonomy.cdm.model.taxon.Taxon;
 import eu.etaxonomy.cdm.model.taxon.TaxonNode;
 import eu.etaxonomy.cdm.model.taxon.TaxonRelationship;
 import eu.etaxonomy.cdm.model.taxon.TaxonRelationshipType;
+import eu.etaxonomy.cdm.strategy.cache.TagEnum;
+import eu.etaxonomy.cdm.strategy.cache.TaggedText;
+import eu.etaxonomy.cdm.strategy.cache.reference.DefaultReferenceCacheStrategy;
 import eu.etaxonomy.cdm.strategy.exceptions.UnknownCdmTypeException;
 
 /**
@@ -988,7 +991,7 @@ public class CdmLightClassificationExport
             HomotypicalGroup group =name.getHomotypicalGroup();
 
             if (state.getHomotypicalGroupFromStore(group.getId()) == null){
-                handleHomotypicalGroup(state, group);
+                handleHomotypicalGroup(state, HibernateProxyHelper.deproxy(group, HomotypicalGroup.class));
             }
             csvLine[table.getIndex(CdmLightExportTable.HOMOTYPIC_GROUP_FK)] = getId(state, group);
             List<TaxonName> typifiedNames = new ArrayList<>();
@@ -1276,7 +1279,7 @@ public class CdmLightClassificationExport
      */
     private void handleHomotypicalGroup(CdmLightExportState state, HomotypicalGroup group) {
         try {
-            state.addHomotypicalGroupToStore(group);
+           state.addHomotypicalGroupToStore(group);
             CdmLightExportTable table = CdmLightExportTable.HOMOTYPIC_GROUP;
             String[] csvLine = new String[table.getSize()];
 
@@ -1298,18 +1301,55 @@ public class CdmLightClassificationExport
             }else{
                 csvLine[table.getIndex(CdmLightExportTable.HOMOTYPIC_GROUP_STRING)] = "";
             }
-            Set<TypeDesignationBase> typeDesigantions = group.getTypeDesignations();
+            Set<TypeDesignationBase> typeDesigantionSet = group.getTypeDesignations();
             List<TypeDesignationBase> designationList = new ArrayList<>();
-            designationList.addAll(typeDesigantions);
+            designationList.addAll(typeDesigantionSet);
             Collections.sort(designationList, new TypeComparator());
             StringBuffer typeDesignationString = new StringBuffer();
-            for (TypeDesignationBase typeDesignation: typeDesigantions){
+            List<TaggedText> list = new ArrayList<TaggedText>();
+            if (!designationList.isEmpty()){
+                TypeDesignationSetManager manager = new TypeDesignationSetManager(group);
+
+                list.addAll( manager.toTaggedTextWithCitation());
+                System.err.println(list.toString());
+            }
+            StringBuffer homotypicalGroupTypeDesignationString = new StringBuffer();
+
+            for (TaggedText text:list){
+                if (text != null && text.getText() != null && (text.getText().equals("Type:") || text.getText().equals("NameType:"))){
+                  //do nothing
+                } else if (text.getType().equals(TagEnum.reference)){
+                    homotypicalGroupTypeDesignationString.append(text.getText());
+                }else if (text.getType().equals(TagEnum.typeDesignation)){
+                    homotypicalGroupTypeDesignationString.append(text.getText().replace(").", "").replace("(", "").replace(")", "") );
+                } else{
+                    homotypicalGroupTypeDesignationString.append(text.getText());
+                }
+            }
+
+
+            String typeDesignations= homotypicalGroupTypeDesignationString.toString();
+            typeDesignations = typeDesignations.trim();
+
+            typeDesignations += ".";
+            typeDesignations = typeDesignations.replace("..", ".");
+            typeDesignations = typeDesignations.replace(". .", ".");
+            if (typeDesignations.equals(".")){
+                typeDesignations = null;
+            }
+            System.err.println(typeDesignations);
+       /*     for (TypeDesignationBase typeDesignation: designationList){
+                //[Vorschlag Soll:]
+               // Sumatra Utara, Kab. Karo, around Sidikalang areas, 1000─1500 m, Dec 11, 2003, Nepenthes Team (Hernawati, P. Akhriadi & I. Petra), NP 354 (‘ANDA’–Holo, BO–Iso) [fide Akhriadi & al. 2004]
                 if (typeDesignation != null && typeDesignation.getTypeStatus() != null){
-                    typeDesignationString.append(typeDesignation.getTypeStatus().getTitleCache() + ": ");
+                    typeDesignationString.append(typeDesignation.getTypeStatus().getTitleCache() + ":");
                 }
                 if (typeDesignation instanceof SpecimenTypeDesignation){
                     if (((SpecimenTypeDesignation)typeDesignation).getTypeSpecimen() != null){
-                        typeDesignationString.append(((SpecimenTypeDesignation)typeDesignation).getTypeSpecimen().getTitleCache());
+                        typeDesignationString.append(" "+((SpecimenTypeDesignation)typeDesignation).getTypeSpecimen().getTitleCache());
+                        if (typeDesignationString.lastIndexOf(".") == typeDesignationString.length()){
+                            typeDesignationString.deleteCharAt(typeDesignationString.lastIndexOf("."));
+                        }
                         handleSpecimen(state, ((SpecimenTypeDesignation)typeDesignation).getTypeSpecimen());
                     }
                 }else{
@@ -1318,7 +1358,7 @@ public class CdmLightClassificationExport
                     }
                 }
                 if(typeDesignation.getCitation() != null ){
-                    typeDesignationString.append(", "+typeDesignation.getCitation().getTitleCache());
+                    typeDesignationString.append(" [fide " + ((DefaultReferenceCacheStrategy)typeDesignation.getCitation().getCacheStrategy()).createShortCitation(typeDesignation.getCitation()) +"]");
                 }
                 //TODO...
                 /*
@@ -1331,16 +1371,21 @@ public class CdmLightClassificationExport
                 Aufbau der Typusinformationen:
                 Land: Lokalität mit Höhe und Koordinaten; Datum; Sammler Nummer (Herbar/Barcode, Typusart; Herbar/Barcode, Typusart â€¦)
 
-                 */
-            }
-            String typeDesignations = typeDesignationString.toString();
+
+            }*/
+           // typeDesignations = typeDesignationString.toString();
             if (typeDesignations != null){
+                if (!typeDesignations.endsWith(".") ){
+                    typeDesignations =typeDesignations + ".";
+                }
                 csvLine[table.getIndex(CdmLightExportTable.TYPE_STRING)] = typeDesignations;
+
             }else{
                 csvLine[table.getIndex(CdmLightExportTable.TYPE_STRING)] = "";
             }
             state.getProcessor().put(table, String.valueOf(group.getId()), csvLine);
         } catch (Exception e) {
+            e.printStackTrace();
             state.getResult().addException(e, "An unexpected error occurred when handling homotypic group " +
                     cdmBaseStr(group) + ": " + e.getMessage());
         }
@@ -1498,7 +1543,7 @@ public class CdmLightClassificationExport
             String[] csvLine = new String[table.getSize()];
             csvLine[table.getIndex(CdmLightExportTable.REFERENCE_ID)] = getId(state, reference);
             //TODO short citations correctly
-            String shortCitation = createShortCitation(reference);  //Should be Author(year) like in Taxon.sec
+            String shortCitation = ((DefaultReferenceCacheStrategy)reference.getCacheStrategy()).createShortCitation(reference);  //Should be Author(year) like in Taxon.sec
             csvLine[table.getIndex(CdmLightExportTable.BIBLIO_SHORT_CITATION)] = shortCitation;
             //TODO get preferred title
             csvLine[table.getIndex(CdmLightExportTable.REF_TITLE)] = reference.getTitle();
@@ -1542,54 +1587,7 @@ public class CdmLightClassificationExport
     }
 
 
-    /**
-     * @param reference
-     * @return
-     */
-    private String createShortCitation(Reference reference) {
-        TeamOrPersonBase<?> authorship = reference.getAuthorship();
-        String shortCitation = "";
-        if (authorship == null) {
-            return null;
-        }
-        authorship = HibernateProxyHelper.deproxy(authorship);
-        if (authorship instanceof Person){
-            shortCitation = ((Person)authorship).getFamilyName();
-            if (StringUtils.isBlank(shortCitation) ){
-                shortCitation = ((Person)authorship).getTitleCache();
-            }
-        }
-        else if (authorship instanceof Team){
 
-            Team authorTeam = HibernateProxyHelper.deproxy(authorship, Team.class);
-            int index = 0;
-
-            for (Person teamMember : authorTeam.getTeamMembers()){
-                index++;
-                if (index == 3){
-                    shortCitation += " & al.";
-                    break;
-                }
-                String concat = concatString(authorTeam, authorTeam.getTeamMembers(), index);
-                if (teamMember.getFamilyName() != null){
-                    shortCitation += concat + teamMember.getFamilyName();
-                }else{
-                    shortCitation += concat + teamMember.getTitleCache();
-                }
-
-            }
-            if (StringUtils.isBlank(shortCitation)){
-                shortCitation = authorTeam.getTitleCache();
-            }
-
-        }
-        if (!StringUtils.isBlank(reference.getDatePublished().getFreeText())){
-            shortCitation = shortCitation + " (" + reference.getDatePublished().getFreeText() + ")";
-        }else if (!StringUtils.isBlank(reference.getYear()) ){
-            shortCitation = shortCitation + " (" + reference.getYear() + ")";
-        }
-        return shortCitation;
-    }
 
     /**
      * @param reference
index 466a53c06fe17d00d8f745db63f0876f3442bdf7..0957a0fe5d295f18362ed195c0a03482a72bf0d1 100644 (file)
@@ -138,4 +138,21 @@ public class ExportResult extends IoResultBase implements Serializable {
         this.exportType = exportType;
     }
 
+    /**
+     * @param report
+     */
+    @Override
+    protected void addShortDescription(StringBuffer report) {
+        if (this.isSuccess()){
+            report.append("\n" + "Export was successfull.");
+        }
+
+        if (!this.isSuccess()){
+            report.append("\n" + "Export had some problems.");
+        }
+
+    }
+
+
+
 }
index 4e56e1e527a867149b31cd67a08d689d14186bcc..3d317f74a72d78545a19d7f952510064d9e7e667 100644 (file)
@@ -28,7 +28,6 @@ import eu.etaxonomy.cdm.filter.TaxonNodeFilter;
 import eu.etaxonomy.cdm.io.common.TaxonNodeOutStreamPartitioner;
 import eu.etaxonomy.cdm.io.common.XmlExportState;
 import eu.etaxonomy.cdm.model.common.CdmBase;
-import eu.etaxonomy.cdm.model.common.DefinedTermBase;
 import eu.etaxonomy.cdm.model.common.Language;
 import eu.etaxonomy.cdm.model.description.DescriptionBase;
 import eu.etaxonomy.cdm.model.description.DescriptionElementBase;
@@ -492,17 +491,13 @@ public class CsvNameExport extends CsvNameExportBase {
         if (state.getNotesFeature() != null){
             return state.getNotesFeature();
         } else{
-            Pager<DefinedTermBase> notesFeature = getTermService().findByTitleWithRestrictions(Feature.class, "Notes" ,MatchMode.EXACT, null, null, null, null, null);
+            Pager<Feature> notesFeature = getTermService().findByTitleWithRestrictions(Feature.class, "Notes" ,MatchMode.EXACT, null, null, null, null, null);
             if (notesFeature.getRecords().size() == 0){
                 return null;
             }else{
-                DefinedTermBase<?> feature=  notesFeature.getRecords().iterator().next();
-                if (feature instanceof Feature){
-                    state.setNotesFeature((Feature)feature);
-                    return (Feature) feature;
-                } else{
-                    return null;
-                }
+                Feature feature=  notesFeature.getRecords().iterator().next();
+                state.setNotesFeature(feature);
+                return feature;
             }
         }
 
index 38b3996822df25e78eb74f8eec6f876255b5b977..6b63eaa8d141d26f11ccf0dbb0bbe9d8efa4fbe1 100644 (file)
@@ -59,7 +59,7 @@ public class ExcelDistributionUpdate
      * {@inheritDoc}
      */
     @Override
-    protected void analyzeRecord(HashMap<String, String> record, ExcelDistributionUpdateState state) {
+    protected void analyzeRecord(Map<String, String> record, ExcelDistributionUpdateState state) {
         // nothing to do
     }
 
@@ -68,7 +68,7 @@ public class ExcelDistributionUpdate
      */
     @Override
     protected void firstPass(ExcelDistributionUpdateState state) {
-        HashMap<String, String> record = state.getOriginalRecord();
+        Map<String, String> record = state.getOriginalRecord();
         String line = state.getCurrentLine() + ": ";
         String taxonUuid = getValue(record, "taxon_uuid");
         String taxonName = getValue(record, "Taxonname");
@@ -99,7 +99,7 @@ public class ExcelDistributionUpdate
      * @param record
      * @param line
      */
-    private void handleAreasForTaxon(ExcelDistributionUpdateState state, Taxon taxon, HashMap<String, String> record,
+    private void handleAreasForTaxon(ExcelDistributionUpdateState state, Taxon taxon, Map<String, String> record,
             String line) {
         ImportResult result = state.getResult();
         Map<NamedArea, Set<Distribution>> existingDistributions = getExistingDistributions(state, taxon, line);
@@ -165,7 +165,7 @@ public class ExcelDistributionUpdate
     }
 
     private Map<NamedArea, Distribution> getNewDistributions(ExcelDistributionUpdateState state,
-            HashMap<String, String> record, String line) {
+            Map<String, String> record, String line) {
 
         Map<NamedArea, Distribution> result = new HashMap<>();
 
index 3927f023db7b001d076cc3695994349c5533889d..55b58cbb06febf0f65f8bc66115d39d8e76066bf 100755 (executable)
@@ -12,8 +12,7 @@ package eu.etaxonomy.cdm.io.excel.common;
 import java.io.ByteArrayInputStream;
 import java.io.FileNotFoundException;
 import java.net.URI;
-import java.util.ArrayList;
-import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.UUID;
 
@@ -48,7 +47,7 @@ public abstract class ExcelImportBase<STATE extends ExcelImportState<CONFIG, ROW
 
        protected static final String SCIENTIFIC_NAME_COLUMN = "ScientificName";
 
-       private ArrayList<HashMap<String, String>> recordList = null;
+       private List<Map<String, String>> recordList = null;
 
        private ExcelImportConfiguratorBase configurator = null;
 
@@ -82,16 +81,13 @@ public abstract class ExcelImportBase<STATE extends ExcelImportState<CONFIG, ROW
                    source = state.getConfig().getSource();
                }
 
-
-
-               String sheetName = getWorksheetName();
-
+               String sheetName = getWorksheetName(state.getConfig());
 
                if (data != null){
             try {
                 ByteArrayInputStream stream = new ByteArrayInputStream(data);
                 recordList = ExcelUtils.parseXLS(stream, sheetName);
-            } catch (FileNotFoundException e) {
+            } catch (Exception e) {
                 throw new RuntimeException(e);
             }
         }else{
@@ -124,7 +120,7 @@ public abstract class ExcelImportBase<STATE extends ExcelImportState<CONFIG, ROW
        private void handleRecordList(STATE state, URI source) {
                Integer startingLine = 2;
                if (recordList != null) {
-               HashMap<String,String> record = null;
+               Map<String,String> record = null;
 
                TransactionStatus txStatus = startTransaction();
 
@@ -165,11 +161,14 @@ public abstract class ExcelImportBase<STATE extends ExcelImportState<CONFIG, ROW
        }
 
        /**
-        * To define a worksheet name override this method. Otherwise the first worksheet is taken.
+        * To define a worksheet name other than the one defined in the configurator
+        * override this method with a non <code>null</code> return value.
+        * If <code>null</code> is returned the first worksheet is taken.
+        *
         * @return worksheet name. <code>null</null> if no worksheet is defined.
         */
-       protected String getWorksheetName() {
-               return null;
+       protected String getWorksheetName(CONFIG config) {
+               return config.getWorksheetName();
        }
 
        @Override
@@ -185,7 +184,7 @@ public abstract class ExcelImportBase<STATE extends ExcelImportState<CONFIG, ROW
         * @param record
         * @return
         */
-       protected abstract void analyzeRecord(HashMap<String,String> record, STATE state);
+       protected abstract void analyzeRecord(Map<String,String> record, STATE state);
 
        protected abstract void firstPass(STATE state);
        protected abstract void secondPass(STATE state);
@@ -265,7 +264,7 @@ public abstract class ExcelImportBase<STATE extends ExcelImportState<CONFIG, ROW
             String colNameCache, String colNameTitleCache, String colTaxonTitleCache,
             Class<T> clazz, String line) {
 
-        HashMap<String, String> record = state.getOriginalRecord();
+        Map<String, String> record = state.getOriginalRecord();
         String strUuidTaxon = record.get(colTaxonUuid);
         if (strUuidTaxon != null){
             UUID uuidTaxon;
@@ -293,7 +292,7 @@ public abstract class ExcelImportBase<STATE extends ExcelImportState<CONFIG, ROW
 
             return CdmBase.deproxy(result, clazz);
         }else{
-            String message = "No taxon identifier found";
+            String message = "No taxon identifier column found";
             state.getResult().addWarning(message, null, line);
             return null;
         }
@@ -304,11 +303,11 @@ public abstract class ExcelImportBase<STATE extends ExcelImportState<CONFIG, ROW
      * @see #getTaxonByCdmId(ExcelImportState, String, String, String, String, Class, String)
      */
     protected void verifyName(STATE state, String colNameCache, String colNameTitleCache, String colTaxonTitleCache,
-            String line, HashMap<String, String> record, TaxonBase<?> result) {
+            String line, Map<String, String> record, TaxonBase<?> result) {
         //nameCache
         String strExpectedNameCache = record.get(colNameCache);
         String nameCache = result.getName() == null ? null : result.getName().getNameCache();
-        if (isNotBlank(strExpectedNameCache) && (!strExpectedNameCache.equals(nameCache))){
+        if (isNotBlank(strExpectedNameCache) && (!strExpectedNameCache.trim().equals(nameCache))){
             String message = "Name cache (%s) does not match expected name (%s)";
             message = String.format(message, nameCache==null? "null":nameCache, strExpectedNameCache);
             state.getResult().addWarning(message, null, line);
@@ -316,7 +315,7 @@ public abstract class ExcelImportBase<STATE extends ExcelImportState<CONFIG, ROW
         //name title
         String strExpectedNameTitleCache = record.get(colNameTitleCache);
         String nameTitleCache = result.getName() == null ? null : result.getName().getTitleCache();
-        if (isNotBlank(strExpectedNameTitleCache) && (!strExpectedNameTitleCache.equals(nameTitleCache))){
+        if (isNotBlank(strExpectedNameTitleCache) && (!strExpectedNameTitleCache.trim().equals(nameTitleCache))){
             String message = "Name title cache (%s) does not match expected name (%s)";
             message = String.format(message, nameTitleCache==null? "null":nameTitleCache, strExpectedNameTitleCache);
             state.getResult().addWarning(message, null, line);
@@ -324,7 +323,7 @@ public abstract class ExcelImportBase<STATE extends ExcelImportState<CONFIG, ROW
         //taxon title cache
         String strExpectedTaxonTitleCache = record.get(colTaxonTitleCache);
         String taxonTitleCache = result.getTitleCache();
-        if (isNotBlank(strExpectedTaxonTitleCache) && (!strExpectedTaxonTitleCache.equals(taxonTitleCache))){
+        if (isNotBlank(strExpectedTaxonTitleCache) && (!strExpectedTaxonTitleCache.trim().equals(taxonTitleCache))){
             String message = "Name cache (%s) does not match expected name (%s)";
             message = String.format(message, taxonTitleCache==null? "null":taxonTitleCache, strExpectedTaxonTitleCache);
             state.getResult().addWarning(message, null, line);
index 9dc0b4009651c69d58102a1017d633a8604d1f81..91248d2b61b768eb064b912a7240ddd233c2d066 100755 (executable)
@@ -31,6 +31,9 @@ public abstract class ExcelImportConfiguratorBase
        private byte[] stream;\r
        private boolean deduplicateReferences = false;\r
        private boolean deduplicateAuthors = false;\r
+\r
+       private String worksheetName = null;\r
+\r
        /**\r
         * @param url\r
         * @param destination\r
@@ -65,32 +68,29 @@ public abstract class ExcelImportConfiguratorBase
        }\r
         public byte[] getStream(){\r
                return stream;\r
-           }\r
-\r
-\r
-           public void setStream(byte[] stream) {\r
-               this.stream = stream;\r
-           }\r
-\r
-        /**\r
-         * @return the deduplicateReferences\r
-         */\r
-        public boolean isDeduplicateReferences() {\r
-            return deduplicateReferences;\r
-        }\r
-\r
-        /**\r
-         * @param deduplicateReferences the deduplicateReferences to set\r
-         */\r
-        public void setDeduplicateReferences(boolean deduplicateReferences) {\r
-            this.deduplicateReferences = deduplicateReferences;\r
-        }\r
-\r
-        public boolean isDeduplicateAuthors() {\r
-            return deduplicateAuthors;\r
-        }\r
-\r
-        public void setDeduplicateAuthors(boolean deduplicateAuthors) {\r
-            this.deduplicateAuthors = deduplicateAuthors;\r
-        }\r
+    }\r
+    public void setStream(byte[] stream) {\r
+        this.stream = stream;\r
+    }\r
+\r
+    public boolean isDeduplicateReferences() {\r
+        return deduplicateReferences;\r
+    }\r
+    public void setDeduplicateReferences(boolean deduplicateReferences) {\r
+        this.deduplicateReferences = deduplicateReferences;\r
+    }\r
+\r
+    public boolean isDeduplicateAuthors() {\r
+        return deduplicateAuthors;\r
+    }\r
+    public void setDeduplicateAuthors(boolean deduplicateAuthors) {\r
+        this.deduplicateAuthors = deduplicateAuthors;\r
+    }\r
+\r
+    public String getWorksheetName() {\r
+        return worksheetName;\r
+    }\r
+    public void setWorksheetName(String worksheetName) {\r
+        this.worksheetName = worksheetName;\r
+    }\r
 }\r
index 5d535071b92705fec7822301c13ffce03b12fa1c..b6d2a197fd6bde552788077ce54f0cddfef735bc 100644 (file)
@@ -9,7 +9,7 @@
 
 package eu.etaxonomy.cdm.io.excel.common;
 
-import java.util.HashMap;
+import java.util.Map;
 
 import org.apache.log4j.Logger;
 
@@ -27,16 +27,14 @@ public class ExcelImportState<CONFIG extends ExcelImportConfiguratorBase, ROW ex
 
        private Integer currentLine;
        private ROW currentRow;
-    private HashMap<String, String> originalRecord;
+    private Map<String, String> originalRecord;
 
     private Reference sourceReference;
 
-
     public ExcelImportState(CONFIG config) {
         super(config);
     }
 
-
        public Integer getCurrentLine() {
                return currentLine;
        }
@@ -64,11 +62,11 @@ public class ExcelImportState<CONFIG extends ExcelImportConfiguratorBase, ROW ex
                this.currentRow = currentRow;
        }
 
-       public HashMap<String,String> getOriginalRecord(){
+       public Map<String,String> getOriginalRecord(){
            return this.originalRecord;
        }
 
-    public void setOriginalRecord(HashMap<String,String> originalRecord){
+    public void setOriginalRecord(Map<String,String> originalRecord){
         this.originalRecord = originalRecord;
     }
 
index 75f2981affb2f6bc5327a151cbcab8d6996f455c..cda2d25cc4da548b51217d3742b4f77b66d4ddba 100644 (file)
@@ -9,8 +9,8 @@
 \r
 package eu.etaxonomy.cdm.io.excel.common;\r
 \r
-import java.util.HashMap;\r
 import java.util.List;\r
+import java.util.Map;\r
 import java.util.Set;\r
 import java.util.UUID;\r
 \r
@@ -23,7 +23,6 @@ import eu.etaxonomy.cdm.io.excel.common.ExcelRowBase.PostfixTerm;
 import eu.etaxonomy.cdm.io.specimen.excel.in.SpecimenCdmExcelImportState;\r
 import eu.etaxonomy.cdm.io.specimen.excel.in.SpecimenRow;\r
 import eu.etaxonomy.cdm.model.common.CdmBase;\r
-import eu.etaxonomy.cdm.model.common.DefinedTermBase;\r
 import eu.etaxonomy.cdm.model.common.Extension;\r
 import eu.etaxonomy.cdm.model.common.ExtensionType;\r
 import eu.etaxonomy.cdm.model.common.IdentifiableEntity;\r
@@ -53,7 +52,7 @@ public abstract class ExcelTaxonOrSpecimenImportBase<STATE extends ExcelImportSt
        protected static final String LANGUAGE = "(?i)(Language)";\r
 \r
        @Override\r
-       protected void analyzeRecord(HashMap<String, String> record, STATE state) {\r
+       protected void analyzeRecord(Map<String, String> record, STATE state) {\r
                Set<String> keys = record.keySet();\r
 \r
        ROW row = createDataHolderRow();\r
@@ -155,7 +154,7 @@ public abstract class ExcelTaxonOrSpecimenImportBase<STATE extends ExcelImportSt
         * @param keyValue\r
         * @return\r
         */\r
-       protected KeyValue makeKeyValue(HashMap<String, String> record, String originalKey, STATE state) {\r
+       protected KeyValue makeKeyValue(Map<String, String> record, String originalKey, STATE state) {\r
                KeyValue keyValue = new KeyValue();\r
                keyValue.originalKey = originalKey;\r
                String indexedKey = CdmUtils.removeDuplicateWhitespace(originalKey.trim()).toString();\r
@@ -269,7 +268,7 @@ public abstract class ExcelTaxonOrSpecimenImportBase<STATE extends ExcelImportSt
 \r
        protected boolean analyzeFeatures(STATE state, KeyValue keyValue) {\r
                String key = keyValue.key;\r
-               Pager<DefinedTermBase> features = getTermService().findByTitleWithRestrictions(Feature.class, key, null, null, null, null, null, null);\r
+               Pager<Feature> features = getTermService().findByTitleWithRestrictions(Feature.class, key, null, null, null, null, null, null);\r
 \r
                if (features.getCount() > 1){\r
                        String message = "More than one feature found matching key " + key;\r
@@ -278,7 +277,7 @@ public abstract class ExcelTaxonOrSpecimenImportBase<STATE extends ExcelImportSt
                }else if (features.getCount() == 0){\r
                        return false;\r
                }else{\r
-                       Feature feature = CdmBase.deproxy(features.getRecords().get(0), Feature.class);\r
+                       Feature feature = CdmBase.deproxy(features.getRecords().get(0));\r
                        ROW row = state.getCurrentRow();\r
                        if ( keyValue.isKeyData()){\r
                                row.putFeature(feature.getUuid(), keyValue.index, keyValue.value);\r
index 474ce82c3044a7cb04593ea86cbda0014756ef74..59d46a435cea571f675141ebbe5a0c0835d56a07 100644 (file)
@@ -76,7 +76,7 @@ public class DistributionImport
                if (logger.isDebugEnabled()) { logger.debug("Importing distribution data"); }\r
 \r
                // read and save all rows of the excel worksheet\r
-               ArrayList<HashMap<String, String>> recordList;\r
+               List<Map<String, String>> recordList;\r
                URI source = state.getConfig().getSource();\r
                try{\r
                recordList = ExcelUtils.parseXLS(source);\r
@@ -88,7 +88,7 @@ public class DistributionImport
                        return;\r
                }\r
        if (recordList != null) {\r
-               HashMap<String,String> record = null;\r
+               Map<String,String> record = null;\r
                TransactionStatus txStatus = startTransaction();\r
 \r
                for (int i = 0; i < recordList.size(); i++) {\r
@@ -113,7 +113,7 @@ public class DistributionImport
        /**\r
         *  Reads the data of one Excel sheet row\r
         */\r
-    private void analyzeRecord(HashMap<String,String> record) {\r
+    private void analyzeRecord(Map<String,String> record) {\r
        /*\r
         * Relevant columns:\r
         * Name (EDIT)\r
index d04dfd24ac96f5f8d5117a157d3ebb1ca03818b2..8bbf889d1c7bb21a8e13d8d80d763ff1750fc8c9 100644 (file)
@@ -18,11 +18,11 @@ import eu.etaxonomy.cdm.io.excel.common.ExcelTaxonOrSpecimenImportBase;
 /**
  * @author a.babadshanjan
  * @since 09.01.2009
- * @version 1.0
  */
 public abstract class TaxonExcelImportBase
-extends ExcelTaxonOrSpecimenImportBase<TaxonExcelImportState, ExcelImportConfiguratorBase, ExcelRowBase> {
-       @SuppressWarnings("unused")
+        extends ExcelTaxonOrSpecimenImportBase<TaxonExcelImportState, ExcelImportConfiguratorBase, ExcelRowBase> {
+
+    @SuppressWarnings("unused")
        private static final Logger logger = Logger.getLogger(TaxonExcelImportBase.class);
 
        /*
index b24099b45e4ed5a6dafadc0afe65a304c35b853b..2e2bf5f2177a2c9a1450caba48b69a668b63913e 100644 (file)
@@ -11,8 +11,8 @@ package eu.etaxonomy.cdm.io.media.in;
 import java.net.URI;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
@@ -68,7 +68,7 @@ public class MediaExcelImport
      * {@inheritDoc}
      */
     @Override
-    protected void analyzeRecord(HashMap<String, String> record, MediaExcelImportState state) {
+    protected void analyzeRecord(Map<String, String> record, MediaExcelImportState state) {
         // do nothing
     }
 
@@ -77,7 +77,7 @@ public class MediaExcelImport
      */
     @Override
     protected void firstPass(MediaExcelImportState state) {
-        HashMap<String, String> record = state.getOriginalRecord();
+        Map<String, String> record = state.getOriginalRecord();
         String line = "row " + state.getCurrentLine() + ": ";
         String linePure = "row " + state.getCurrentLine();
         System.out.println(linePure);
@@ -127,22 +127,15 @@ public class MediaExcelImport
 
         //date
         String dateStr = record.get(COL_DATE);
-        if (isNotBlank(artistStr)){
+        if (isNotBlank(dateStr)){
             TimePeriod timePeriod = TimePeriodParser.parseString(dateStr);
             if (timePeriod.getFreeText()!=  null){
                 String message = "Date could not be parsed: %s";
                 message = String.format(message, dateStr);
                 state.getResult().addWarning(message, null, line);
             }
-            if (timePeriod.getEnd() !=  null){
-                String message = "Date is a period with an end date. Periods are currently not yet supported: %s";
-                message = String.format(message, dateStr);
-                state.getResult().addWarning(message, null, line);
-            }
 
-            Partial start = timePeriod.getStart();
-            DateTime dateTime = toDateTime(state, start, dateStr, line);
-            media.setMediaCreated(TimePeriod.NewInstance(dateTime));
+            media.setMediaCreated(timePeriod);
         }
 
         //URLs
@@ -279,7 +272,7 @@ public class MediaExcelImport
      */
     private List<URI> getUrls(MediaExcelImportState state, String line) {
         List<URI> list = new ArrayList<>();
-        HashMap<String, String> record = state.getOriginalRecord();
+        Map<String, String> record = state.getOriginalRecord();
         for (String str : record.keySet()){
             if (str.equals("url") || str.matches("url_size\\d+") ){
                 String url = record.get(str);
@@ -333,7 +326,7 @@ public class MediaExcelImport
         }
 
         Person result = (Person)getDeduplicationHelper(state).getExistingAuthor(null, person);
-        return person;
+        return result;
     }
 
     /**
index ed56722f7802d6a54ed8f331a9e819f868297b2d..f185ae76c89bc87e89b7417d997935d6a9735d7a 100755 (executable)
@@ -1113,6 +1113,7 @@ public abstract class SpecimenImportBase<CONFIG extends IImportConfigurator, STA
                        name.addTypeDesignation(designation, true);
                    }
                }
+               save(state.getDerivedUnitBase(), state);
 
                for (String[] fullReference : state.getDataHolder().getReferenceList()) {
 
index 33c6312ba2c70945b8911609e367259180f8bf1a..7cb6a707be2008abfa8329e2089604ddbdd34b8d 100644 (file)
@@ -94,11 +94,14 @@ public class UnitsGatheringEvent {
      * @param collectorNames
      */
     public UnitsGatheringEvent(ITermService termService, String locality, String languageIso, Double longitude,
-            Double latitude, String elevationText, String elevationMin, String elevationMax, String elevationUnit,
+            Double latitude, String errorRadius, String elevationText, String elevationMin, String elevationMax, String elevationUnit,
             String date, String gatheringNotes, String gatheringMethod, ReferenceSystem referenceSystem,
              Abcd206ImportConfigurator config) {
         this.setLocality(termService, locality, languageIso);
-        this.setCoordinates(longitude, latitude, referenceSystem);
+
+        Integer errorRadiusInt = errorRadius == null ? null : Integer.valueOf(errorRadius.trim()); // Integer.getInteger() reads a system property, not the string value; TODO guard non-numeric input
+
+        this.setCoordinates(longitude, latitude, referenceSystem, errorRadiusInt);
         this.setDate(date);
         this.setNotes(gatheringNotes);
         this.setElevation(elevationText, elevationMin, elevationMax, elevationUnit);
@@ -153,10 +156,10 @@ public class UnitsGatheringEvent {
      * @param: latitude
      */
     public void setCoordinates(Double longitude, Double latitude){
-        setCoordinates(longitude, latitude, null);
+        setCoordinates(longitude, latitude, null, null);
     }
 
-    public void setCoordinates(Double longitude, Double latitude, ReferenceSystem referenceSystem){
+    public void setCoordinates(Double longitude, Double latitude, ReferenceSystem referenceSystem, Integer errorRadius){
         //create coordinates point
         if((longitude == null) || (latitude == null)){
             return;
@@ -169,6 +172,9 @@ public class UnitsGatheringEvent {
         if (latitude != 0.0) {
             coordinates.setLatitude(latitude);
         }
+        if (errorRadius != null && errorRadius != 0) { // null check avoids NPE on unboxing — the 2-arg overload passes null
+            coordinates.setErrorRadius(errorRadius);
+        }
         coordinates.setReferenceSystem(referenceSystem);
         this.gatheringEvent.setExactLocation(coordinates);
 
index 1891ee027fe6e3607b52f312b2ca7d90c5a24ca9..85297eb187ceb52fb53129a312705f5735e251ff 100644 (file)
@@ -9,6 +9,7 @@
 
 package eu.etaxonomy.cdm.io.specimen.abcd206.in;
 
+import java.io.IOException;
 import java.io.InputStream;
 import java.net.MalformedURLException;
 import java.net.URI;
@@ -27,6 +28,7 @@ import org.w3c.dom.NodeList;
 
 import eu.etaxonomy.cdm.api.application.ICdmRepository;
 import eu.etaxonomy.cdm.api.facade.DerivedUnitFacade;
+import eu.etaxonomy.cdm.common.StreamUtils;
 import eu.etaxonomy.cdm.ext.occurrence.bioCase.BioCaseQueryServiceWrapper;
 import eu.etaxonomy.cdm.hibernate.HibernateProxyHelper;
 import eu.etaxonomy.cdm.io.common.ICdmIO;
@@ -53,6 +55,8 @@ import eu.etaxonomy.cdm.model.common.TermType;
 import eu.etaxonomy.cdm.model.common.TermVocabulary;
 import eu.etaxonomy.cdm.model.location.NamedArea;
 import eu.etaxonomy.cdm.model.media.Media;
+import eu.etaxonomy.cdm.model.media.Rights;
+import eu.etaxonomy.cdm.model.media.RightsType;
 import eu.etaxonomy.cdm.model.molecular.DnaSample;
 import eu.etaxonomy.cdm.model.occurrence.Collection;
 import eu.etaxonomy.cdm.model.occurrence.DerivationEvent;
@@ -66,6 +70,7 @@ import eu.etaxonomy.cdm.model.occurrence.SpecimenOrObservationType;
 import eu.etaxonomy.cdm.model.reference.Reference;
 import eu.etaxonomy.cdm.model.reference.ReferenceFactory;
 import eu.etaxonomy.cdm.model.taxon.Classification;
+import eu.etaxonomy.cdm.strategy.parser.TimePeriodParser;
 
 /**
  * @author p.kelbert
@@ -296,11 +301,19 @@ public class Abcd206Import extends SpecimenImportBase<Abcd206ImportConfigurator,
         } finally {
             state.getReport().printReport(state.getConfig().getReportUri());
         }
-
         if (state.getConfig().isDownloadSequenceData()) {
-            // download fasta files for imported sequences
-            // TODO: where to store the files and how to create the new Blast DB
+            for (URI uri:state.getSequenceDataStableIdentifier()){
+                // Files.createDirectories(file.getParent()); // optional, make sure parent dir exists
+                try {
+                    StreamUtils.downloadFile(uri.toURL(), "temp");
+                } catch (IOException e) {
+                    // TODO Auto-generated catch block
+                    e.printStackTrace();
+                }
+
+            }
         }
+
         return;
     }
 
@@ -489,7 +502,7 @@ public class Abcd206Import extends SpecimenImportBase<Abcd206ImportConfigurator,
             // gathering event
             UnitsGatheringEvent unitsGatheringEvent = new UnitsGatheringEvent(cdmAppController.getTermService(),
                     state.getDataHolder().locality, state.getDataHolder().languageIso, state.getDataHolder().longitude,
-                    state.getDataHolder().latitude, state.getDataHolder().getGatheringElevationText(),
+                    state.getDataHolder().latitude, state.getDataHolder().getGatheringCoordinateErrorMethod() , state.getDataHolder().getGatheringElevationText(),
                     state.getDataHolder().getGatheringElevationMin(), state.getDataHolder().getGatheringElevationMax(),
                     state.getDataHolder().getGatheringElevationUnit(), state.getDataHolder().getGatheringDateText(),
                     state.getDataHolder().getGatheringNotes(), state.getDataHolder().getGatheringMethod(),
@@ -614,6 +627,28 @@ public class Abcd206Import extends SpecimenImportBase<Abcd206ImportConfigurator,
                             }
 
                         }
+                        if (attributes.containsKey("CreateDate")) {
+                            String createDate = attributes.get("CreateDate");
+
+                            if (createDate != null) {
+
+                               media.setMediaCreated(TimePeriodParser.parseString(createDate));
+                            }
+
+                        }
+
+
+                        if (attributes.containsKey("License")) {
+                            String licence = attributes.get("License");
+
+                            if (licence != null) {
+                               Rights right = Rights.NewInstance(licence, Language.ENGLISH(), RightsType.LICENSE());
+                               media.addRights(right);
+                            }
+
+                        }
+
+
 
                         derivedUnitFacade.addDerivedUnitMedia(media);
                         if (state.getConfig().isAddMediaAsMediaSpecimen()) {
@@ -1133,7 +1168,13 @@ public class Abcd206Import extends SpecimenImportBase<Abcd206ImportConfigurator,
                 type = SpecimenOrObservationType.PreservedSpecimen;
             } else if (state.getDataHolder().getRecordBasis().toLowerCase().startsWith("o")
                     || state.getDataHolder().getRecordBasis().toLowerCase().indexOf("observation") > -1) {
-                type = SpecimenOrObservationType.Observation;
+                if (state.getDataHolder().getRecordBasis().toLowerCase().contains("machine") && state.getDataHolder().getRecordBasis().toLowerCase().contains("observation")){
+                    type = SpecimenOrObservationType.MachineObservation;
+                }else if (state.getDataHolder().getRecordBasis().toLowerCase().contains("human") && state.getDataHolder().getRecordBasis().toLowerCase().contains("observation")){
+                    type = SpecimenOrObservationType.HumanObservation;
+                }else{
+                    type = SpecimenOrObservationType.Observation;
+                }
             } else if (state.getDataHolder().getRecordBasis().toLowerCase().indexOf("fossil") > -1) {
                 type = SpecimenOrObservationType.Fossil;
             } else if (state.getDataHolder().getRecordBasis().toLowerCase().indexOf("materialsample") > -1) {
index dd7b72ca3462a970a875c9bcf90125a80b19c665..654de006b892a3d4a6c883bf72d198afad8882f8 100755 (executable)
@@ -71,6 +71,7 @@ public class Abcd206ImportParser {
             abcdFieldGetter.getGatheringImages(root);
             abcdFieldGetter.getGatheringMethod(root);
             abcdFieldGetter.getAssociatedUnitIds(root);
+            abcdFieldGetter.getPreparation(root);
             abcdFieldGetter.getUnitNotes(root);
             boolean referencefound = abcdFieldGetter.getReferences(root);
 //            if (!referencefound && state.getRef() != null) {
index cfce07ca5448fcb42ae32b78dc827c9c9676520f..a8a563490a3233da3918220a6cb9ab1e7a5d1ccd 100644 (file)
@@ -42,6 +42,7 @@ public class Abcd206ImportState
        private Set<URI> sequenceDataStableIdentifier = new HashSet<>();
 
 
+
 //****************** CONSTRUCTOR ***************************************************/
 
 
index 0ca5a04ac8f2fffe008310a55a186eb48f1d1850..a50390fc2e2ff896aa95f2be5185c59b17483d6a 100644 (file)
@@ -44,6 +44,13 @@ public class Abcd206XMLFieldGetter {
                         .equals(prefix + "SpecimenUnit")) {
                     types = results.item(k).getChildNodes();
                     for (int l = 0; l < types.getLength(); l++) {
+                        /*
+                         *NomenclaturalTypeDesignations>
+                         *<abcd21:NomenclaturalTypeDesignation>
+                         *<abcd21:TypeStatus>epitype</abcd21:TypeStatus>
+                         *</abcd21:NomenclaturalTypeDesignation>
+                         *</abcd21:NomenclaturalTypeDesignations>
+                         */
                         if (types.item(l).getNodeName().equals(prefix+ "NomenclaturalTypeDesignations")) {
                             ntds = types.item(l).getChildNodes();
                             for (int m = 0; m < ntds.getLength(); m++) {
@@ -58,10 +65,10 @@ public class Abcd206XMLFieldGetter {
                                             }
                                             dataHolder.getStatusList().add(getSpecimenTypeDesignationStatusByKey(type));
                                             typeFound=true;
-                                            path = ntd.item(l).getNodeName();
-                                            getHierarchie(ntd.item(l));
-                                            dataHolder.knownABCDelements.add(path);
-                                            path = "";
+                                          //  path = ntd.item(n).getNodeName();
+                                         //   getHierarchie(ntd.item(l));
+                                          //  dataHolder.knownABCDelements.add(path);
+                                           // path = "";
                                         }
                                     }
                                 }
@@ -74,6 +81,7 @@ public class Abcd206XMLFieldGetter {
                 }
             }
         } catch (NullPointerException e) {
+            System.err.println(e.getMessage());
             dataHolder.setStatusList(new ArrayList<SpecimenTypeDesignationStatus>());
         }
     }
@@ -210,7 +218,7 @@ public class Abcd206XMLFieldGetter {
                     // logger.info("TMP NAME P" + tmpName);
                     dataHolder.getIdentificationList().add(new Identification(tmpName, preferredFlag, dataHolder.getNomenclatureCode(), identifierStr, dateStr));
                 } else {
-                    dataHolder.getIdentificationList().add(new Identification(tmpName, preferredFlag, dateStr));
+                    dataHolder.getIdentificationList().add(new Identification(tmpName, preferredFlag,identifierStr, dateStr));
                 }
             }
         }
@@ -238,7 +246,7 @@ public class Abcd206XMLFieldGetter {
                             && dataHolder.getNomenclatureCode() != "") {
                         dataHolder.getIdentificationList().add(new Identification(tmpName, "0", dataHolder.getNomenclatureCode(), null, dateStr));
                     } else {
-                        dataHolder.getIdentificationList().add(new Identification(tmpName, "0", dateStr));
+                        dataHolder.getIdentificationList().add(new Identification(tmpName, "0", null, dateStr));
                     }
                 }
             }
@@ -276,6 +284,15 @@ public class Abcd206XMLFieldGetter {
 
         for (int k = 0; k < results.getLength(); k++) {
             if (results.item(k).getNodeName().equals(prefix + "Identifier")) {
+                /*
+                 * <abcd21:Identifiers>
+                 * <abcd21:Identifier>
+                 * <abcd21:PersonName>
+                 * <abcd21:FullName>R. Jahn</abcd21:FullName>
+                 * </abcd21:PersonName>
+                 * </abcd21:Identifier>
+                 * </abcd21:Identifiers>
+                 */
                 identifier = results.item(k).getChildNodes();
                 for (int l = 0; l < identifier.getLength(); l++) {
                     if (identifier.item(l).getNodeName().equals(prefix + "PersonName")) {
@@ -663,6 +680,16 @@ public class Abcd206XMLFieldGetter {
         } catch (NullPointerException e) {
             dataHolder.latitude = null;
         }
+        try {
+            group = root.getElementsByTagName(prefix + "CoordinateErrorDistanceInMeters");
+            path = group.item(0).getNodeName();
+            getHierarchie(group.item(0));
+            dataHolder.knownABCDelements.add(path);
+            path = "";
+            dataHolder.setGatheringCoordinateErrorMethod(group.item(0).getTextContent());
+        } catch (NullPointerException e) {
+            dataHolder.latitude = null;
+        }
         try {
             group = root.getElementsByTagName(prefix + "Country");
             childs = group.item(0).getChildNodes();
@@ -843,7 +870,7 @@ public class Abcd206XMLFieldGetter {
      * @param root
      */
     protected void getMultimedia(Element root) {
-        NodeList group, multimedias, multimedia, creators, creator, copyRightNodes, iprNodes, textNodes, licences, copyrights;
+        NodeList group, multimedias, multimedia, creators, creator, copyRightNodes, iprNodes, textNodes, licenceNodes, licences, copyrights;
         try {
             group = root.getElementsByTagName(prefix + "MultiMediaObjects");
             for (int i = 0; i < group.getLength(); i++) {
@@ -853,70 +880,114 @@ public class Abcd206XMLFieldGetter {
                     if (multimedias.item(j).getNodeName().equals(prefix + "MultiMediaObject")) {
                         multimedia = multimedias.item(j).getChildNodes();
                         Map<String,String> mediaObjectMap = new HashMap<String, String>();
-                        String fileUri = "";
-                        for (int k = 0; k < multimedia.getLength(); k++) {
-                            if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "fileURI")) {
-                                fileUri = multimedia.item(k).getTextContent();
-                                mediaObjectMap.put("fileUri", fileUri);
-                                path = multimedia.item(k).getNodeName();
-                                getHierarchie(multimedia.item(k));
-                                dataHolder.knownABCDelements.add(path);
-                                path = "";
-                            } else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "context")) {
-                                mediaObjectMap.put("Context", multimedia.item(k).getTextContent());
-                            } else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "Creators")){
-                                String creatorString = "";
-                                creators = multimedia.item(k).getChildNodes();
-                                for (int l = 0; l < creators.getLength(); l++) {
-
-                                    if (creators.item(l).getNodeName().equalsIgnoreCase(prefix + "Creator")){
-                                        if (creatorString != ""){
-                                            creatorString += ", ";
-                                        }
-                                       creatorString += creators.item(l).getTextContent();
+
+                        String fileUri = extractMediaInformation(multimedia, mediaObjectMap);
+                        if (fileUri != ""){
+                            dataHolder.putMultiMediaObject(fileUri,mediaObjectMap);
+                        }
+                    }
+                }
+            }
+        } catch (NullPointerException e) {
+            logger.info(e);
+        }
+    }
+
+
+    /**
+     * @param multimedia
+     * @param mediaObjectMap
+     * @param fileUri
+     * @return
+     */
+    private String extractMediaInformation(NodeList multimedia, Map<String, String> mediaObjectMap) {
+        NodeList creators;
+        NodeList copyRightNodes;
+        NodeList iprNodes;
+        NodeList licenceNodes;
+        NodeList license;
+        NodeList copyrights;
+        String fileUri = "";
+        for (int k = 0; k < multimedia.getLength(); k++) {
+            if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "fileURI")) {
+                fileUri = multimedia.item(k).getTextContent();
+                mediaObjectMap.put("fileUri", fileUri);
+                path = multimedia.item(k).getNodeName();
+                getHierarchie(multimedia.item(k));
+                dataHolder.knownABCDelements.add(path);
+                path = "";
+            } else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "context")) {
+                mediaObjectMap.put("Context", multimedia.item(k).getTextContent());
+            } else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "createDate")) {
+                mediaObjectMap.put("CreateDate", multimedia.item(k).getTextContent());
+            }else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "Creators")){
+                String creatorString = "";
+                creators = multimedia.item(k).getChildNodes();
+                for (int l = 0; l < creators.getLength(); l++) {
+
+                    if (creators.item(l).getNodeName().equalsIgnoreCase(prefix + "Creator")){
+                        if (creatorString != ""){
+                            creatorString += ", ";
+                        }
+                       creatorString += creators.item(l).getTextContent();
+                    }
+                }
+                mediaObjectMap.put("Creators",creatorString);
+            } else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "Creator")){
+                mediaObjectMap.put("Creators",multimedia.item(k).getTextContent());
+            } else if (multimedia.item(k).getNodeName().equals("CreatedDate")){
+                mediaObjectMap.put("CreatedDate",multimedia.item(k).getTextContent());
+            } else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "comment")){
+                mediaObjectMap.put("Comment",multimedia.item(k).getTextContent());
+            } else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "IPR")){
+                String copyRightString = "";
+                iprNodes = multimedia.item(k).getChildNodes();
+                for (int l = 0; l < iprNodes.getLength(); l++) {
+                    if (iprNodes.item(l).getNodeName().equalsIgnoreCase(prefix + "Copyrights")){
+                        copyRightNodes = iprNodes.item(l).getChildNodes();
+                        for (int m = 0; m < copyRightNodes.getLength(); m++) {
+                            if (copyRightNodes.item(l).getNodeName().equalsIgnoreCase(prefix + "Copyright")){
+                                copyrights = copyRightNodes.item(l).getChildNodes();
+                                for (int n = 0; n < copyrights.getLength(); n++){
+                                    if (copyrights.item(n).getNodeName().equalsIgnoreCase(prefix + "text")){
+                                        mediaObjectMap.put("Copyright", copyrights.item(n).getTextContent());
                                     }
                                 }
-                                mediaObjectMap.put("Creators",creatorString);
-                            } else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "Creator")){
-                                mediaObjectMap.put("Creators",multimedia.item(k).getTextContent());
-                            } else if (multimedia.item(k).getNodeName().equals("CreatedDate")){
-                                mediaObjectMap.put("CreatedDate",multimedia.item(k).getTextContent());
-                            } else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "comment")){
-                                mediaObjectMap.put("Comment",multimedia.item(k).getTextContent());
-                            } else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "IPR")){
-                                String copyRightString = "";
-                                iprNodes = multimedia.item(k).getChildNodes();
-                                for (int l = 0; l < iprNodes.getLength(); l++) {
-                                    if (iprNodes.item(l).getNodeName().equalsIgnoreCase(prefix + "Copyrights")){
-                                        copyRightNodes = iprNodes.item(l).getChildNodes();
-                                        for (int m = 0; m < copyRightNodes.getLength(); m++) {
-                                            if (copyRightNodes.item(l).getNodeName().equalsIgnoreCase(prefix + "Copyright")){
-                                                copyrights = copyRightNodes.item(l).getChildNodes();
-                                                for (int n = 0; n < copyrights.getLength(); n++){
-                                                    if (copyrights.item(n).getNodeName().equalsIgnoreCase(prefix + "text")){
-                                                           //TODO: decide whether this is the copyright owner or a description text
-                                                    }
-                                                }
+                            }
+                        }
+                    } else if (iprNodes.item(l).getNodeName().equalsIgnoreCase(prefix + "Licenses")){
+                        licenceNodes = iprNodes.item(l).getChildNodes();
+                        for (int m = 0; m < licenceNodes.getLength(); m++) {
+                            if (licenceNodes.item(l).getNodeName().equalsIgnoreCase(prefix + "License")){
+                                license = licenceNodes.item(l).getChildNodes();
+                                for (int n = 0; n < license.getLength(); n++){
+                                    if (license.item(n).getNodeName().equalsIgnoreCase(prefix + "Text")){
+                                        mediaObjectMap.put("License", license.item(n).getTextContent());
+                                    }else{
+                                        Node node = license.item(n);
+                                        NodeList children = node.getChildNodes();
+                                        for (int o = 0; o < children.getLength(); o++){
+                                            if (children.item(n).getNodeName().equalsIgnoreCase(prefix + "Text")){
+                                                mediaObjectMap.put("License", children.item(n).getTextContent());
                                             }
+
                                         }
                                     }
                                 }
-
-
-
-                              // TODO: mediaObjectMap.put("IPR",multimedia.item(k).getTextContent());
                             }
-
-                        }
-                        if (fileUri != ""){
-                            dataHolder.putMultiMediaObject(fileUri,mediaObjectMap);
                         }
+                    }else{
+                        System.err.println(iprNodes.item(l).getNodeName());
                     }
+
                 }
+
+            } else{
+                System.err.println(multimedia.item(k).getNodeName());
             }
-        } catch (NullPointerException e) {
-            logger.info(e);
+
         }
+        return fileUri;
     }
 
     protected void getAssociatedUnitIds(Element root) {
@@ -1082,7 +1153,7 @@ public class Abcd206XMLFieldGetter {
                     if (children.item(j).getNodeName().equals(prefix + "DateTime")) {
                         NodeList dateTimes = children.item(j).getChildNodes();
                         for (int k = 0; k < dateTimes.getLength(); k++) {
-                            if (dateTimes.item(k).getNodeName().equals(prefix + "DateText")) {
+                            if (dateTimes.item(k).getNodeName().equals(prefix + "ISODateTimeBegin")) {
                                 path = dateTimes.item(k).getNodeName();
                                 getHierarchie(dateTimes.item(k));
                                 dataHolder.knownABCDelements.add(path);
@@ -1462,38 +1533,8 @@ public class Abcd206XMLFieldGetter {
                     if (multimedias.item(j).getNodeName().equals(prefix + "SiteImage")) {
                         multimedia = multimedias.item(j).getChildNodes();
                         Map<String,String> mediaObjectMap = new HashMap<String, String>();
-                        String fileUri = "";
-                        for (int k = 0; k < multimedia.getLength(); k++) {
-
+                        String fileUri = extractMediaInformation(multimedia, mediaObjectMap);
 
-                            if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "fileURI")) {
-                                fileUri = multimedia.item(k).getTextContent();
-                                mediaObjectMap.put("fileUri", fileUri);
-                                path = multimedia.item(k).getNodeName();
-                                getHierarchie(multimedia.item(k));
-                                dataHolder.knownABCDelements.add(path);
-                                path = "";
-                            } else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "Context")) {
-                                mediaObjectMap.put("Context", multimedia.item(k).getTextContent());
-                            }else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "Comment")) {
-                                mediaObjectMap.put("Comment", multimedia.item(k).getTextContent());
-                            } else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "Creators")){
-                                String creatorString = "";
-                                creators = multimedia.item(k).getChildNodes();
-                                for (int l = 0; l < creators.getLength(); l++) {
-
-                                    if (creators.item(l).getNodeName().equalsIgnoreCase(prefix + "Creator")){
-                                        if (creatorString != ""){
-                                            creatorString += ", ";
-                                        }
-                                       creatorString += creators.item(l).getTextContent();
-                                    }
-                                }
-                                mediaObjectMap.put("Creators",creatorString);
-                            } else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "Creator")){
-                                mediaObjectMap.put("Creators",multimedia.item(k).getTextContent());
-                            }
-                        }
                         if (fileUri != ""){
                             dataHolder.putGatheringMultiMediaObject(fileUri,mediaObjectMap);
                         }
index f02699c427496d35b981518edf3bd796ea799c15..41cf5b9350c5846cca94ceb5b9aa20d869f0c551 100644 (file)
@@ -21,8 +21,8 @@ public class Identification {
     private final String code;
     private final String date;
 
-    public Identification(String taxonName, String preferred, String date) {
-        this(taxonName, preferred, null, null, date);
+    public Identification(String taxonName, String preferred, String identifier, String date) {
+        this(taxonName, preferred, null, identifier, date);
     }
 
     public Identification(String scientificName, String preferred, String code, String identifier, String date) {
index 0fb86493b2da9132642f97c8572e6ec25b55795f..af45611f3bea5924c7f2ea8350fee2981586a565 100644 (file)
@@ -22,7 +22,6 @@ import eu.etaxonomy.cdm.io.specimen.abcd206.in.Abcd206ImportState;
 import eu.etaxonomy.cdm.io.specimen.abcd206.in.AbcdParseUtility;
 import eu.etaxonomy.cdm.io.specimen.abcd206.in.SpecimenImportReport;
 import eu.etaxonomy.cdm.model.common.DefinedTerm;
-import eu.etaxonomy.cdm.model.common.DefinedTermBase;
 import eu.etaxonomy.cdm.model.common.OrderedTerm;
 import eu.etaxonomy.cdm.model.media.Media;
 import eu.etaxonomy.cdm.model.molecular.Amplification;
@@ -174,9 +173,9 @@ public class AbcdGgbnParser {
                     if(markerList.item(0)!=null){
                         String amplificationMarker = markerList.item(0).getTextContent();
                         DefinedTerm dnaMarker = null;
-                        List<DefinedTermBase> markersFound = cdmAppController.getTermService().findByTitleWithRestrictions(DefinedTerm.class, amplificationMarker, MatchMode.EXACT, null, null, null, null, null).getRecords();
+                        List<DefinedTerm> markersFound = cdmAppController.getTermService().findByTitleWithRestrictions(DefinedTerm.class, amplificationMarker, MatchMode.EXACT, null, null, null, null, null).getRecords();
                         if(markersFound.size()==1){
-                            dnaMarker = (DefinedTerm) markersFound.get(0);
+                            dnaMarker = markersFound.get(0);
                         }
                         else{
                             dnaMarker = DefinedTerm.NewDnaMarkerInstance(amplificationMarker, amplificationMarker, amplificationMarker);
@@ -198,9 +197,14 @@ public class AbcdGgbnParser {
 
                     //consensus sequence
                     NodeList sequencingsList = amplificationElement.getElementsByTagName(prefix+"Sequencings");
-                    if(sequencingsList.item(0)!=null && sequencingsList.item(0) instanceof Element){
-                        parseAmplificationSequencings((Element)sequencingsList.item(0), amplification, amplificationResult, dnaSample, state);
+                    if(sequencingsList.item(0)!=null) {
+                        if ( sequencingsList.item(0) instanceof Element){
+                            Element el = (Element)sequencingsList.item(0);
+                            parseAmplificationSequencings(el, amplification, amplificationResult, dnaSample, state);
+                        }
                     }
+
+
                     parseAmplificationPrimers(amplificationElement.getElementsByTagName(prefix+"AmplificationPrimers"));
                 }
             }
@@ -245,7 +249,7 @@ public class AbcdGgbnParser {
                 //contig file URL
                 NodeList consensusSequenceChromatogramFileURIList = sequencing.getElementsByTagName(prefix+"consensusSequenceChromatogramFileURI");
                 URI uri = AbcdParseUtility.parseFirstUri(consensusSequenceChromatogramFileURIList, report);
-                if (uri.toString().endsWith("fasta")){
+                if (uri != null && uri.toString().endsWith("fasta")){
                     state.putSequenceDataStableIdentifier(uri);
                 }else{
                     Media contigFile = Media.NewInstance(uri, null, null, null);
index aefd17680ea3a178d4e3f8b9eb25851f14e30528..47dc3f64cd3f41b881da0bad6a9cd3da729465c7 100644 (file)
@@ -9,7 +9,7 @@
 
 package eu.etaxonomy.cdm.io.specimen.excel.in;
 
-import java.util.HashMap;
+import java.util.Map;
 import java.util.Set;
 import java.util.UUID;
 
@@ -49,7 +49,7 @@ public class ExtensionTypeExcelImport
        }
 
        @Override
-       protected void analyzeRecord(HashMap<String, String> record, SpecimenCdmExcelImportState state) {
+       protected void analyzeRecord(Map<String, String> record, SpecimenCdmExcelImportState state) {
                Set<String> keys = record.keySet();
 
        NamedAreaLevellRow row = new NamedAreaLevellRow();
@@ -137,7 +137,7 @@ public class ExtensionTypeExcelImport
        }
 
        @Override
-    protected String getWorksheetName() {
+    protected String getWorksheetName(SpecimenCdmExcelImportConfigurator config) {
                return WORKSHEET_NAME;
        }
 
index 2cc885a87af6ed9d397e2ce19c0cd4bc0deb39fc..3c9882926689102379ad56b4124a3dab0e7f2b06 100644 (file)
@@ -9,7 +9,7 @@
 
 package eu.etaxonomy.cdm.io.specimen.excel.in;
 
-import java.util.HashMap;
+import java.util.Map;
 import java.util.Set;
 import java.util.UUID;
 
@@ -52,7 +52,7 @@ public class NamedAreaLevelExcelImport
        }
 
        @Override
-       protected void analyzeRecord(HashMap<String, String> record, SpecimenCdmExcelImportState state) {
+       protected void analyzeRecord(Map<String, String> record, SpecimenCdmExcelImportState state) {
                Set<String> keys = record.keySet();
 
        NamedAreaLevellRow row = new NamedAreaLevellRow();
@@ -152,7 +152,7 @@ public class NamedAreaLevelExcelImport
        }
 
        @Override
-    protected String getWorksheetName() {
+    protected String getWorksheetName(SpecimenCdmExcelImportConfigurator config) {
                return WORKSHEET_NAME;
        }
 
index d9f3811b084eeb3c920035e8aeea87a6f5473897..8264d8414d81e6ce27b051d7a10c86039666a8eb 100644 (file)
@@ -98,29 +98,24 @@ public class SpecimenCdmExcelImport
        private static final String REFERENCE_SYSTEM_COLUMN = "(?i)(ReferenceSystem)";
        private static final String ERROR_RADIUS_COLUMN = "(?i)(ErrorRadius)";
 
-
        private static final String COLLECTORS_NUMBER_COLUMN = "(?i)((Collectors|Field)Number)";
        private static final String ECOLOGY_COLUMN = "(?i)(Ecology|Habitat)";
        private static final String PLANT_DESCRIPTION_COLUMN = "(?i)(PlantDescription)";
        private static final String FIELD_NOTES_COLUMN = "(?i)(FieldNotes)";
        private static final String SEX_COLUMN = "(?i)(Sex)";
 
-
        private static final String ACCESSION_NUMBER_COLUMN = "(?i)(AccessionNumber)";
        private static final String BARCODE_COLUMN = "(?i)(Barcode)";
        private static final String COLLECTION_CODE_COLUMN = "(?i)(CollectionCode)";
        private static final String COLLECTION_COLUMN = "(?i)(Collection)";
        private static final String UNIT_NOTES_COLUMN = "(?i)((Unit)?Notes)";
 
-
        private static final String TYPE_CATEGORY_COLUMN = "(?i)(TypeCategory)";
        private static final String TYPIFIED_NAME_COLUMN = "(?i)(TypifiedName|TypeOf)";
 
-
        private static final String SOURCE_COLUMN = "(?i)(Source)";
        private static final String ID_IN_SOURCE_COLUMN = "(?i)(IdInSource)";
 
-
        private static final String DETERMINATION_AUTHOR_COLUMN = "(?i)(Author)";
        private static final String DETERMINATION_MODIFIER_COLUMN = "(?i)(DeterminationModifier)";
        private static final String DETERMINED_BY_COLUMN = "(?i)(DeterminationBy)";
@@ -128,14 +123,10 @@ public class SpecimenCdmExcelImport
        private static final String DETERMINATION_NOTES_COLUMN = "(?i)(DeterminationNote)";
        private static final String EXTENSION_COLUMN = "(?i)(Ext(ension)?)";
 
-
        public SpecimenCdmExcelImport() {
                super();
        }
 
-
-
-
        @Override
        protected void analyzeSingleValue(KeyValue keyValue, SpecimenCdmExcelImportState state) {
                SpecimenRow row = state.getCurrentRow();
@@ -246,7 +237,6 @@ public class SpecimenCdmExcelImport
                        }else{
                                logger.warn("Extension without postfix not yet implemented");
                        }
-
                }else {
                        state.setUnsuccessfull();
                        logger.error("Unexpected column header " + keyValue.originalKey);
@@ -255,7 +245,6 @@ public class SpecimenCdmExcelImport
        return;
        }
 
-
        @Override
        protected void firstPass(SpecimenCdmExcelImportState state) {
                SpecimenRow row = state.getCurrentRow();
@@ -270,7 +259,6 @@ public class SpecimenCdmExcelImport
                }
                DerivedUnitFacade facade = DerivedUnitFacade.NewInstance(type);
 
-
                Language lang = Language.DEFAULT();
                if (StringUtils.isNotBlank(row.getLanguage())){
                        Language langIso = getTermService().getLanguageByIso(row.getLanguage());
@@ -373,8 +361,6 @@ public class SpecimenCdmExcelImport
                        return;
 
                }
-
-
        }
 
        private void handleAreas(DerivedUnitFacade facade, SpecimenRow row, SpecimenCdmExcelImportState state) {
@@ -432,7 +418,6 @@ public class SpecimenCdmExcelImport
                        }
                }
 
-
                for (DeterminationLight determinationLight : row.getDetermination()){
                        Taxon taxon;
                        if (! hasCommonTaxonInfo){
@@ -456,7 +441,6 @@ public class SpecimenCdmExcelImport
                                        if (state.getConfig().isUseMaterialsExaminedForIndividualsAssociations()){
                                                feature = Feature.MATERIALS_EXAMINED();
                                        }
-
                                        indivAssociciation.setFeature(feature);
                                }
                                if (state.getConfig().isDeterminationsAreDeterminationEvent()){
@@ -497,7 +481,7 @@ public class SpecimenCdmExcelImport
 
                //name
                INonViralName name;
-               INonViralNameParser parser = NonViralNameParserImpl.NewInstance();
+               INonViralNameParser<INonViralName> parser = NonViralNameParserImpl.NewInstance();
                NomenclaturalCode nc = state.getConfig().getNomenclaturalCode();
                if (StringUtils.isNotBlank(commonDetermination.fullName)){
                        name = parser.parseFullName(commonDetermination.fullName, nc, rank);
@@ -561,12 +545,8 @@ public class SpecimenCdmExcelImport
 
                //return
                return taxon;
-
        }
 
-
-
-
        private void setAuthorship(INonViralName name, String author, INonViralNameParser<INonViralName> parser) {
                if (name.isBotanical() || name.isZoological()){
                        try {
@@ -579,13 +559,10 @@ public class SpecimenCdmExcelImport
                }
        }
 
-
-
        /**
         * This method tries to find the best matching taxon depending on the import configuration,
         * the taxon name information and the concept information available.
         *
-        *
         * @param state
         * @param determinationLight
         * @param createIfNotExists
@@ -635,7 +612,6 @@ public class SpecimenCdmExcelImport
                        if (StringUtils.isNotBlank(computedTitleCache)){
                                titleCache = computedTitleCache;
                        }
-
                }
                return titleCache;
        }
@@ -700,10 +676,8 @@ public class SpecimenCdmExcelImport
                }else{
                        return null;
                }
-
        }
 
-
        private DeterminationEvent makeDeterminationEvent(SpecimenCdmExcelImportState state, DeterminationLight determination, Taxon taxon) {
                DeterminationEvent event = DeterminationEvent.NewInstance();
                //taxon
@@ -761,7 +735,7 @@ public class SpecimenCdmExcelImport
                        return null;
                }
                AgentBase<?> collector = facade.getCollector();
-               List<Person> collectors = new ArrayList<Person>();
+               List<Person> collectors = new ArrayList<>();
                if (collector.isInstanceOf(Team.class) ){
                        Team team = CdmBase.deproxy(collector, Team.class);
                        collectors.addAll(team.getTeamMembers());
@@ -819,8 +793,6 @@ public class SpecimenCdmExcelImport
                return result;
        }
 
-
-
        private Collection getOrMakeCollection(SpecimenCdmExcelImportState state, String collectionCode, String collectionString) {
                Collection result = state.getCollection(collectionCode);
                if (result == null){
@@ -848,7 +820,6 @@ public class SpecimenCdmExcelImport
                        NonViralNameParserImpl parser = NonViralNameParserImpl.NewInstance();
                        NomenclaturalCode code = state.getConfig().getNomenclaturalCode();
                        result = (TaxonName)parser.parseFullName(name, code, null);
-
                }
                if (result != null){
                        state.putName(name, result);
@@ -869,8 +840,6 @@ public class SpecimenCdmExcelImport
                } catch (UndefinedTransformerMethodException e) {
                        throw new RuntimeException("getSpecimenTypeDesignationStatusByKey not yet implemented");
                }
-
-
        }
 
 
@@ -899,8 +868,6 @@ public class SpecimenCdmExcelImport
                        }
                }
 
-
-
                // lat/ long /error
                try {
                        String longitude = row.getLongitude();
@@ -921,14 +888,9 @@ public class SpecimenCdmExcelImport
                        String message = "Problems when parsing exact location for line %d";
                        message = String.format(message, state.getCurrentLine());
                        logger.warn(message);
-
                }
-
-
-
        }
 
-
        /*
         * Set the current Country
         * Search in the DB if the isoCode is known
@@ -972,7 +934,7 @@ public class SpecimenCdmExcelImport
 
 
        @Override
-       protected String getWorksheetName() {
+       protected String getWorksheetName(SpecimenCdmExcelImportConfigurator config) {
                return WORKSHEET_NAME;
        }
 
@@ -981,33 +943,19 @@ public class SpecimenCdmExcelImport
                return false;
        }
 
-
-       /* (non-Javadoc)
-        * @see eu.etaxonomy.cdm.io.excel.common.ExcelTaxonOrSpecimenImportBase#createDataHolderRow()
-        */
        @Override
        protected SpecimenRow createDataHolderRow() {
                return new SpecimenRow();
        }
 
-
-
-
-       /* (non-Javadoc)
-        * @see eu.etaxonomy.cdm.io.common.CdmIoBase#doCheck(eu.etaxonomy.cdm.io.common.IoStateBase)
-        */
        @Override
        protected boolean doCheck(SpecimenCdmExcelImportState state) {
                logger.warn("Validation not yet implemented for " + this.getClass().getSimpleName());
                return true;
        }
 
-
-
        @Override
        protected boolean isIgnore(SpecimenCdmExcelImportState state) {
                return !state.getConfig().isDoSpecimen();
        }
-
-
 }
index ccb5ce8f5253ddb8cde421be25f5140589221035..93a1d173073de18a2b7be5fc67de57463a39d931 100644 (file)
@@ -877,7 +877,7 @@ public class SpecimenSythesysExcelImport  extends CdmImportBase<SpecimenSynthesy
         refreshTransaction();
 
         URI source = state.getConfig().getSource();
-        ArrayList<HashMap<String,String>> unitsList = null;
+        List<Map<String,String>> unitsList = null;
         try{
             unitsList = ExcelUtils.parseXLS(source);
             logger.info("unitslist : "+unitsList.size());
@@ -901,7 +901,7 @@ public class SpecimenSythesysExcelImport  extends CdmImportBase<SpecimenSynthesy
 
                 specimenOrObs = getOccurrenceService().listByAssociatedTaxon(null, null, taxon, null, null, null, null, null);
             }
-            HashMap<String,String> unit=null;
+            Map<String,String> unit=null;
             MyHashMap<String,String> myunit;
             for (int i=0; i<unitsList.size();i++){
                 //            for (int i=0; i<10;i++){
@@ -936,13 +936,13 @@ public class SpecimenSythesysExcelImport  extends CdmImportBase<SpecimenSynthesy
      * @param unitsList
      * @param state
      */
-    private void prepareCollectors(ArrayList<HashMap<String, String>> unitsList, SpecimenSynthesysExcelImportState state) {
+    private void prepareCollectors(List<Map<String, String>> unitsList, SpecimenSynthesysExcelImportState state) {
         System.out.println("PREPARE COLLECTORS");
-        List<String> collectors = new ArrayList<String>();
-        List<String> teams = new ArrayList<String>();
-        List<List<String>> collectorinteams = new ArrayList<List<String>>();
+        List<String> collectors = new ArrayList<>();
+        List<String> teams = new ArrayList<>();
+        List<List<String>> collectorinteams = new ArrayList<>();
         String tmp;
-        for (HashMap<String,String> unit : unitsList){
+        for (Map<String,String> unit : unitsList){
             tmp=null;
             tmp = unit.get("collector");
             if (tmp != null && !tmp.isEmpty()) {
index 0ce3855fcb619aaae1aef4924ee5bec679c3ad85..9b93679bd1eb439a91549484439d28c30e78f427 100644 (file)
@@ -95,7 +95,7 @@ public class TcsXmlTaxonRelationsImport extends TcsXmlImportBase implements ICdm
                ResultWrapper<Boolean> success = ResultWrapper.NewInstance(true);\r
                String childName;\r
                boolean obligatory;\r
-               String idNamespace = "TaxonRelation";\r
+               String idNamespace = "TaxonRelationDTO";\r
 \r
                TcsXmlImportConfigurator config = state.getConfig();\r
                Element elDataSet = super.getDataSetElement(config);\r
index 3df3c1ea0de3ffcd0944ebd4f02c33f4f09e6792..77b4b2759ceaa9d07bc6758ff907ee327d62fe61 100644 (file)
@@ -114,7 +114,7 @@ public class SDDImportExportTest extends CdmTransactionalIntegrationTest {
 \r
         sddImport.doInvoke(new SDDImportState(importConfigurator));\r
 \r
-        logger.setLevel(Level.DEBUG);\r
+        //logger.setLevel(Level.DEBUG);\r
         commitAndStartNewTransaction(new String[]{"DEFINEDTERMBASE"});\r
         logger.setLevel(Level.DEBUG);\r
 \r
index 2a9a1e9b7e0e0fb6ac173bd8a9b591cb715af3f3..97d8f100922302ff272b2865edfe25a5950cd57e 100644 (file)
@@ -2,7 +2,7 @@
   <parent>
     <artifactId>cdmlib-parent</artifactId>
     <groupId>eu.etaxonomy</groupId>
-    <version>5.2.0</version>
+    <version>5.3.0</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
   <modelVersion>4.0.0</modelVersion>
diff --git a/cdmlib-model/src/main/java/eu/etaxonomy/cdm/exception/FilterException.java b/cdmlib-model/src/main/java/eu/etaxonomy/cdm/exception/FilterException.java
new file mode 100644 (file)
index 0000000..38a0e6d
--- /dev/null
@@ -0,0 +1,65 @@
+/**
+* Copyright (C) 2018 EDIT
+* European Distributed Institute of Taxonomy
+* http://www.e-taxonomy.eu
+*
+* The contents of this file are subject to the Mozilla Public License Version 1.1
+* See LICENSE.TXT at the top of this package for the full license terms.
+*/
+package eu.etaxonomy.cdm.exception;
+
+/**
+ * This Exception is for throwing an exception if a filter prevents a method
+ * from returning any value except an empty one.
+ * This is because the filter condition itself can never be <code>true</code>
+ * (e.g. because a given subtree ID does not exist in the database at all)
+ * or because none of the data matches the filter.
+ * <BR>
+ * In the first case it is recommended to set the invalidFilter parameter to <code>true</code>
+ * to indicate that the filter itself might not be correct.
+ *
+ * @author a.mueller
+ * @since 14.09.2018
+ */
+public class FilterException extends Exception {
+
+    private static final long serialVersionUID = 7491596488082796101L;
+
+    private boolean invalidFilter;
+
+    public FilterException(boolean invalidFilter) {
+        super();
+        this.setInvalidFilter(invalidFilter);
+    }
+
+    public FilterException(String message, boolean invalidFilter) {
+        super(message);
+        this.setInvalidFilter(invalidFilter);
+    }
+
+    /**
+     * @param cause
+     */
+    public FilterException(Throwable cause, boolean invalidFilter) {
+        super(cause);
+        this.setInvalidFilter(invalidFilter);
+    }
+
+    /**
+     * @param message
+     * @param cause
+     */
+    public FilterException(String message, Throwable cause, boolean invalidFilter) {
+        super(message, cause);
+        this.setInvalidFilter(invalidFilter);
+    }
+
+    public boolean isInvalidFilter() {
+        return invalidFilter;
+    }
+
+    public void setInvalidFilter(boolean invalidFilter) {
+        this.invalidFilter = invalidFilter;
+    }
+
+}
index 98b7d7738141a8cedc402a57a71da683dd6c8bf8..dd4bf2fe9625b32a2b4d0a1ffc71f8b2b6c5adcc 100644 (file)
@@ -25,7 +25,7 @@ import eu.etaxonomy.cdm.model.occurrence.MediaSpecimen;
 /**
  * Factory class that instantiates a matching ICdmFormatter for the given object
  * and configures the format according to the given formatKeys.
- * 
+ *
  * @author pplitzner
  * @since Nov 30, 2015
  *
@@ -35,7 +35,7 @@ public class CdmFormatterFactory {
        /**
         * Returns a matching ICdmFormatter for the given object configured with the
         * given formatKeys
-        * 
+        *
         * @param object
         *            the object which should be formatted as a string
         * @param formatKeys
@@ -74,7 +74,7 @@ public class CdmFormatterFactory {
        /**
         * Convenience method which directly formats the given object according to
         * the given formatKeys.
-        * 
+        *
         * @param object
         *            the object which should be formatted as a string
         * @param formatKeys
index b63eed0c74aa524e040e64814af2580d6ef76eac..dc3eea4d41e331efd3555b54e6932a688bbe0efc 100644 (file)
@@ -11,9 +11,10 @@ package eu.etaxonomy.cdm.format.taxon;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.codehaus.plexus.util.StringUtils;
+import org.apache.commons.lang.StringUtils;
 
 import eu.etaxonomy.cdm.common.CdmUtils;
+import eu.etaxonomy.cdm.common.UTF8;
 import eu.etaxonomy.cdm.model.agent.Person;
 import eu.etaxonomy.cdm.model.agent.Team;
 import eu.etaxonomy.cdm.model.agent.TeamOrPersonBase;
@@ -38,6 +39,10 @@ import eu.etaxonomy.cdm.strategy.cache.agent.TeamDefaultCacheStrategy;
  */
 public class TaxonRelationshipFormatter {
 
+    /**
+     *
+     */
+    private static final String DOUBTFUL_TAXON_MARKER = "?" + UTF8.NARROW_NO_BREAK;
     private static final String REL_SEC = ", rel. sec. ";
     private static final String ERR_SEC = ", err. sec. ";
     private static final String SYN_SEC = ", syn. sec. ";
@@ -53,6 +58,12 @@ public class TaxonRelationshipFormatter {
     private static final String UNDEFINED_SYMBOL = "??";  //TODO
 
     public List<TaggedText> getTaggedText(TaxonRelationship taxonRelationship, boolean reverse, List<Language> languages) {
+        return getTaggedText(taxonRelationship, reverse, languages, false);
+    }
+
+
+
+    public List<TaggedText> getTaggedText(TaxonRelationship taxonRelationship, boolean reverse, List<Language> languages, boolean withoutName) {
 
         if (taxonRelationship == null){
             return null;
@@ -69,45 +80,51 @@ public class TaxonRelationshipFormatter {
         if (relatedTaxon == null){
             return null;
         }
-        boolean isDoubtful = taxonRelationship.isDoubtful() || relatedTaxon.isDoubtful();
-        String doubtfulStr = isDoubtful ? "?" : "";
+
+        String doubtfulTaxonStr = relatedTaxon.isDoubtful() ? DOUBTFUL_TAXON_MARKER : "";
+        String doubtfulRelationStr = taxonRelationship.isDoubtful() ? "?" : "";
+
 
         TaxonName name = relatedTaxon.getName();
 
-//        List<TaggedText> tags = new ArrayList<>();
         TaggedTextBuilder builder = new TaggedTextBuilder();
 
         //rel symbol
-        String symbol = getSymbol(type, reverse, languages);
+        String symbol = doubtfulRelationStr + getSymbol(type, reverse, languages);
         builder.add(TagEnum.symbol, symbol);
 
         //name
-        if (isMisapplied){
-            //starting quote
-            String startQuote = " " + doubtfulStr + QUOTE_START;
-            builder.addSeparator(startQuote);// .add(TaggedText.NewSeparatorInstance(startQuote));
-
-            //name cache
-            List<TaggedText> nameCacheTags = getNameCacheTags(name);
-            builder.addAll(nameCacheTags);
-
-            //end quote
-            String endQuote = QUOTE_END;
-            builder.add(TagEnum.postSeparator, endQuote);
+        if (!withoutName){
+            if (isMisapplied){
+                //starting quote
+                String startQuote = " " + doubtfulTaxonStr + QUOTE_START;
+                builder.addSeparator(startQuote);
+
+                //name cache
+                List<TaggedText> nameCacheTags = getNameCacheTags(name);
+                builder.addAll(nameCacheTags);
+
+                //end quote
+                String endQuote = QUOTE_END;
+                builder.add(TagEnum.postSeparator, endQuote);
+            }else{
+                builder.addSeparator(" " + doubtfulTaxonStr);
+                //name full title cache
+                List<TaggedText> nameCacheTags = getNameTitleCacheTags(name);
+                builder.addAll(nameCacheTags);
+            }
         }else{
-            builder.addSeparator(" " + doubtfulStr);
-            //name full title cache
-            List<TaggedText> nameCacheTags = getNameTitleCacheTags(name);
-            builder.addAll(nameCacheTags);
+            if (isNotBlank(doubtfulTaxonStr)){
+                builder.addSeparator(" " + doubtfulTaxonStr);
+            }
         }
 
-
-        //sensu (+ Separatoren?)
+        //sec/sensu (+ Separatoren?)
         if (isNotBlank(relatedTaxon.getAppendedPhrase())){
             builder.addWhitespace();
             builder.add(TagEnum.appendedPhrase, relatedTaxon.getAppendedPhrase());
         }
-        List<TaggedText> secTags = getSensuTags(relatedTaxon.getSec(), relatedTaxon.getSecMicroReference(),
+        List<TaggedText> secTags = getReferenceTags(relatedTaxon.getSec(), relatedTaxon.getSecMicroReference(),
                /* isMisapplied,*/ false);
         if (!secTags.isEmpty()) {
             builder.addSeparator(isMisapplied? SENSU_SEPARATOR : SEC_SEPARATOR);
@@ -124,16 +141,13 @@ public class TaxonRelationshipFormatter {
 
 //        //, non author
         if (isMisapplied && name != null){
-            if (name.getCombinationAuthorship() != null && isNotBlank(name.getCombinationAuthorship().getNomenclaturalTitle())){
-                builder.addSeparator(NON_SEPARATOR);
-                builder.add(TagEnum.authors, name.getCombinationAuthorship().getNomenclaturalTitle());
-            }else if (isNotBlank(name.getAuthorshipCache())){
+            if (isNotBlank(name.getAuthorshipCache())){
                 builder.addSeparator(NON_SEPARATOR);
                 builder.add(TagEnum.authors, name.getAuthorshipCache().trim());
             }
         }
 
-        List<TaggedText> relSecTags = getSensuTags(taxonRelationship.getCitation(),
+        List<TaggedText> relSecTags = getReferenceTags(taxonRelationship.getCitation(),
                 taxonRelationship.getCitationMicroReference(),true);
         if (!relSecTags.isEmpty()){
             builder.addSeparator(isSynonym ? SYN_SEC : isMisapplied ? ERR_SEC : REL_SEC);
@@ -143,7 +157,7 @@ public class TaxonRelationshipFormatter {
         return builder.getTaggedText();
     }
 
-    private List<TaggedText> getSensuTags(Reference ref, String detail, /*boolean isSensu,*/ boolean isRelation) {
+    private List<TaggedText> getReferenceTags(Reference ref, String detail, /*boolean isSensu,*/ boolean isRelation) {
         List<TaggedText> result = new ArrayList<>();
         String secRef;
 
@@ -195,7 +209,7 @@ public class TaxonRelationshipFormatter {
             Team team = CdmBase.deproxy(author, Team.class);
             String result = null;
             int n = team.getTeamMembers().size();
-            int index = 0;
+            int index = 1;
             if (team.isHasMoreMembers()){
                 n++;
             }
index a1eb9817913f11fae991c86a0ebca9559a4e4ea5..2cb8705b5a7f790243cbbddfed4c10978b8c0baa 100644 (file)
@@ -17,6 +17,7 @@ import org.hibernate.search.bridge.LuceneOptions;
 import eu.etaxonomy.cdm.model.taxon.Synonym;
 import eu.etaxonomy.cdm.model.taxon.Taxon;
 import eu.etaxonomy.cdm.model.taxon.TaxonBase;
+import eu.etaxonomy.cdm.model.taxon.TaxonNode;
 
 /**
  * Lucene index class bridge which sets the uuids of the accepted taxon for the
@@ -37,18 +38,22 @@ public class AcceptedTaxonBridge implements FieldBridge { // TODO inherit from A
     public final static String DOC_KEY_UUID_SUFFIX = ".uuid";
     public static final String DOC_KEY_ID_SUFFIX = ".id";
     public final static String DOC_KEY_PUBLISH_SUFFIX = ".publish";
-
+    public final static String DOC_KEY_TREEINDEX = "taxonNodes.treeIndex";
+    public final static String DOC_KEY_CLASSIFICATION_ID = "taxonNodes.classification.id";
+    public final static String ACC_TAXON = "accTaxon"; //there are probably still some places not using this constant, but for renaming in future we should try to use it everywhere
 
     @Override
     public void set(String name, Object value, Document document,
             LuceneOptions luceneOptions) {
         String accTaxonUuid = "";
 
+        boolean isSynonym = false;
         Taxon accTaxon;
         if(value instanceof Taxon){
             accTaxon = (Taxon)value;
         }else if (value instanceof Synonym){
             accTaxon = ((Synonym)value).getAcceptedTaxon();
+            isSynonym = true;
         }else{
             throw new RuntimeException("Unhandled taxon base class: " + value.getClass().getSimpleName());
         }
@@ -77,6 +82,30 @@ public class AcceptedTaxonBridge implements FieldBridge { // TODO inherit from A
                     luceneOptions.getStore()
                     );
             document.add(accPublishField);
+
+            //treeIndex + Classification
+            if (isSynonym && ACC_TAXON.equals(name)){
+                for (TaxonNode node : accTaxon.getTaxonNodes()){
+                    //treeIndex
+                    Field treeIndexField;
+                    if (node.treeIndex()!= null){  //TODO find out why this happens in TaxonServiceSearchTest.testFindByDescriptionElementFullText_modify_Classification
+                        treeIndexField = new StringField(DOC_KEY_TREEINDEX,
+                                node.treeIndex(),
+                                luceneOptions.getStore()
+                                );
+                        document.add(treeIndexField);
+                    }
+
+                    //classification
+                    if (node.getClassification() != null){  //should never be null, but who knows
+                        Field classificationIdField = new StringField(DOC_KEY_CLASSIFICATION_ID,
+                                Integer.toString(node.getClassification().getId()),
+                                luceneOptions.getStore()
+                                );
+                        document.add(classificationIdField);
+                    }
+                }
+            }
         }
     }
 }
index e27d6d5c6124f1130a30c9f04527c5a8b2361d8f..c963193eb929421c124f696e01f67f6d373ef531 100644 (file)
@@ -67,7 +67,15 @@ public class DescriptionBaseClassBridge extends AbstractClassBridge {
 
                     for(TaxonNode node : taxon.getTaxonNodes()){
                         if(node.getClassification() != null){
-                            idFieldBridge.set(name + "taxon.taxonNodes.classification.id", node.getClassification().getId(), document, idFieldOptions);
+                            idFieldBridge.set(name + "taxon.taxonNodes.classification.id",
+                                    node.getClassification().getId(), document, idFieldOptions);
+                        }
+                        if(node.treeIndex() != null){
+                            Field treeIndexField = new StringField("inDescription.taxon.taxonNodes.treeIndex",
+                                    node.treeIndex(),
+                                    Store.YES
+                                    );
+                            document.add(treeIndexField);
                         }
                     }
                 }
index 19623d56bb1c97c324352f5faf020dda88d615fb..c4f250dee7c9f7ebcd7a439dfb212a72a364e43b 100644 (file)
@@ -20,6 +20,7 @@ import org.hibernate.search.bridge.StringBridge;
  */
 public class UuidBridge implements StringBridge {
 
+    @Override
     public String objectToString(Object object) {
         if(object != null) {
             return ((UUID)object).toString();
index 58b33f0e2d3e8baba269d3401c3a87bed9afda0f..938200f456d92479c2da265393a007079a452550 100644 (file)
@@ -37,9 +37,9 @@ public interface ICdmCacher {
     public void put(CdmBase cdmEntity);
 
     /**
-     * load into the cache and return the entity from the cache. The the entity
-     * might already exist in the cache. In case the entity in the cache might
-     * get updated whereas the returned entity represents is the entity from the
+     * Load into the cache and return the entity from the cache. The entity
+     * might already exist in the cache. In this case the entity in the cache might
+     * get updated whereas the returned entity represents the entity from the
      * cache not the <code>cdmEntity</code> passed to this method.
      *
      * @param cdmEntity
@@ -61,4 +61,6 @@ public interface ICdmCacher {
      */
     public boolean exists(CdmBase cdmBase);
 
+    public void dispose();
+
 }
index 98db6e52abf55e073471b48c9cbc30b9c0837c76..45d33cdb39b3efeaefac0861e6493158c4373620 100644 (file)
@@ -15,6 +15,8 @@ import java.util.Set;
 
 import javax.persistence.Embedded;
 import javax.persistence.Entity;
+import javax.persistence.Index;
+import javax.persistence.Table;
 import javax.persistence.Transient;
 import javax.xml.bind.annotation.XmlAccessType;
 import javax.xml.bind.annotation.XmlAccessorType;
@@ -24,8 +26,6 @@ import javax.xml.bind.annotation.XmlType;
 import org.apache.log4j.Logger;
 import org.hibernate.annotations.Cascade;
 import org.hibernate.annotations.CascadeType;
-import org.hibernate.annotations.Index;
-import org.hibernate.annotations.Table;
 import org.hibernate.envers.Audited;
 
 import eu.etaxonomy.cdm.model.common.IIntextReferenceTarget;
@@ -56,7 +56,7 @@ import eu.etaxonomy.cdm.strategy.merge.MergeMode;
 })
 @Entity
 @Audited
-@Table(appliesTo="AgentBase", indexes = { @Index(name = "agentTitleCacheIndex", columnNames = { "titleCache" }) })
+@Table(name="AgentBase", indexes = { @Index(name = "agentTitleCacheIndex", columnList = "titleCache") })
 public abstract class AgentBase<S extends IIdentifiableEntityCacheStrategy<? extends AgentBase<S>>>
         extends IdentifiableMediaEntity<S>
         implements IMergable, IMatchable, IIntextReferenceTarget, Cloneable{
index f15e7a78ccbb4c688b1e197982c3754c33b78a64..a42e72b185d14cc04cda64d30c689b31912cbf84 100644 (file)
@@ -9,7 +9,9 @@
 
 package eu.etaxonomy.cdm.model.common;
 
+import java.util.ArrayList;
 import java.util.HashSet;
+import java.util.List;
 import java.util.Set;
 import java.util.UUID;
 
@@ -123,6 +125,25 @@ public abstract class AnnotatableEntity extends VersionableEntity implements IAn
                }
        }
 
+        public void setAnnotations(Set<Annotation> annotations) {
+        List<Annotation> currentAnnotations = new ArrayList<>(annotations);
+        List<Annotation> annotationsSeen = new ArrayList<>();
+        for(Annotation a : annotations){
+            if(a == null){
+                continue;
+            }
+            if(!currentAnnotations.contains(a)){
+                addAnnotation(a);
+            }
+            annotationsSeen.add(a);
+        }
+        for(Annotation a : currentAnnotations){
+            if(!annotationsSeen.contains(a)){
+                removeAnnotation(a);
+            }
+        }
+    }
+
 //********************** CLONE *****************************************/
 
 
index be370fae5ba3ecf4d704a7a97b02e377903d39d9..50ded826f04fced21c4acf9b9dd935a0beae1727 100644 (file)
@@ -1,8 +1,8 @@
 /**\r
 * Copyright (C) 2007 EDIT\r
-* European Distributed Institute of Taxonomy \r
+* European Distributed Institute of Taxonomy\r
 * http://www.e-taxonomy.eu\r
-* \r
+*\r
 * The contents of this file are subject to the Mozilla Public License Version 1.1\r
 * See LICENSE.TXT at the top of this package for the full license terms.\r
 */\r
@@ -16,31 +16,45 @@ import org.hibernate.event.spi.SaveOrUpdateEventListener;
 /**\r
  * Common interface for all tree data structures supporting tree indexing.\r
  * Mainly used by {@link SaveOrUpdateEventListener} to update the indices.\r
- * \r
+ *\r
  * @author a.mueller\r
  * @since 12.08.2013\r
  *\r
  */\r
 public interface ITreeNode<T extends ITreeNode<T>> extends ICdmBase {\r
-       \r
+\r
        //Constants\r
        //the separator used in the tree index to separate the id's of the parent nodes\r
        public static final String separator = "#";\r
-       \r
+\r
        //The prefix used in the tree index for the id of the tree itself\r
        public static final String treePrefix = "t";\r
-       \r
-       \r
+\r
+\r
        //METHODS\r
-       \r
-       \r
+\r
+\r
        /**\r
         * Returns the tree index of this tree node.\r
         * @return the tree index\r
         */\r
        public String treeIndex();\r
 \r
-       \r
+       /**\r
+     * Returns the tree index followed by an SQL wildcard '%'.\r
+     * @see #treeIndex()\r
+     * @see #treeIndexWc()\r
+     */\r
+    public String treeIndexLike();\r
+\r
+    /**\r
+     * Returns the tree index followed by a asterisk wildcard '*'.\r
+     * @see #treeIndex()\r
+     * @see #treeIndexLike()\r
+     */\r
+    public String treeIndexWc();\r
+\r
+\r
        /**\r
         * Returns the parent node of this node.\r
         * Returns <code>null</code> if this\r
@@ -52,21 +66,23 @@ public interface ITreeNode<T extends ITreeNode<T>> extends ICdmBase {
        /**\r
         * Sets the tree index of this node.\r
         * @deprecated preliminary implementation for updating the treeindex.\r
-        * Maybe removed once index updating is improved.\r
+        * May be removed once index updating is improved.\r
         * @param newTreeIndex\r
         */\r
-       public void setTreeIndex(String newTreeIndex);\r
-       \r
+       @Deprecated\r
+    public void setTreeIndex(String newTreeIndex);\r
+\r
        /**\r
         * Returns all direct child nodes of this node.\r
         * As tree node children do not necessarily need to be\r
         * {@link List lists} the return type of this method may change\r
         * to {@link Collection} in future. Therefore the use\r
         * at the moment is deprecated.\r
-        * @deprecated return type may become {@link Collection} in future \r
+        * @deprecated return type may become {@link Collection} in future\r
         * @return the list of children\r
         */\r
-       public List<T> getChildNodes();\r
+       @Deprecated\r
+    public List<T> getChildNodes();\r
 \r
        /**\r
         * Returns the {@link ICdmBase#getId() id} of the tree object,\r
@@ -75,6 +91,7 @@ public interface ITreeNode<T extends ITreeNode<T>> extends ICdmBase {
         * use this anymore\r
         * @return the id of the tree\r
         */\r
-       public int treeId();\r
+       @Deprecated\r
+    public int treeId();\r
 \r
 }\r
index f24f9b6b95ac2970c8c435d91482891cf3cfa471..f666bf50258a27d49cdf72c57161b7b4cc29df56 100644 (file)
@@ -11,7 +11,9 @@ package eu.etaxonomy.cdm.model.common;
 import javax.persistence.Column;
 import javax.persistence.Entity;
 import javax.persistence.FetchType;
+import javax.persistence.Index;
 import javax.persistence.ManyToOne;
+import javax.persistence.Table;
 import javax.xml.bind.annotation.XmlAccessType;
 import javax.xml.bind.annotation.XmlAccessorType;
 import javax.xml.bind.annotation.XmlElement;
@@ -21,8 +23,6 @@ import javax.xml.bind.annotation.XmlType;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
-import org.hibernate.annotations.Index;
-import org.hibernate.annotations.Table;
 import org.hibernate.envers.Audited;
 import org.hibernate.search.annotations.Field;
 
@@ -40,9 +40,12 @@ import eu.etaxonomy.cdm.validation.annotation.NullOrNotEmpty;
 })
 @Entity
 @Audited
-@Table(appliesTo="Identifier", indexes = { @Index(name = "identifierIndex", columnNames = { "identifier" }) })
-public class Identifier<T extends IdentifiableEntity<?>> extends AnnotatableEntity implements Cloneable {
-       private static final long serialVersionUID = 3337567049024506936L;
+@Table(name="Identifier", indexes = { @Index(name = "identifierIndex", columnList = "identifier") })
+public class Identifier<T extends IdentifiableEntity<?>>
+            extends AnnotatableEntity
+            implements Cloneable {
+
+    private static final long serialVersionUID = 3337567049024506936L;
        @SuppressWarnings("unused")
        private static final Logger logger = Logger.getLogger(Identifier.class);
 
index 6d11570b09b8cf5793cf4b1dbd074fb03d291712..a5ad864efc4993f4eb31455976aff1b0edac9b04 100644 (file)
@@ -19,6 +19,7 @@ import javax.persistence.FetchType;
 import javax.persistence.Inheritance;
 import javax.persistence.InheritanceType;
 import javax.persistence.OneToMany;
+import javax.persistence.Table;
 import javax.validation.constraints.NotNull;
 import javax.xml.bind.annotation.XmlAccessType;
 import javax.xml.bind.annotation.XmlAccessorType;
@@ -32,7 +33,6 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
 import org.hibernate.annotations.Cascade;
 import org.hibernate.annotations.CascadeType;
-import org.hibernate.annotations.Table;
 import org.hibernate.annotations.Type;
 import org.hibernate.envers.Audited;
 import org.springframework.util.Assert;
@@ -61,10 +61,11 @@ import eu.etaxonomy.cdm.strategy.merge.MergeMode;
 @Entity
 @Audited
 @Inheritance(strategy=InheritanceType.SINGLE_TABLE)
-@Table(appliesTo="OriginalSourceBase")
+@Table(name="OriginalSourceBase")
 public abstract class OriginalSourceBase<T extends ISourceable>
         extends ReferencedEntityBase
         implements IOriginalSource<T>, IIntextReferenceTarget,Cloneable {
+
        private static final long serialVersionUID = -1972959999261181462L;
        @SuppressWarnings("unused")
        private static final Logger logger = Logger.getLogger(OriginalSourceBase.class);
index 7afcdda79f16c81c2b53d7143c91e1a01b7b74a3..bdc7b458a9b6d09830f84dee14d72932472a48a3 100644 (file)
@@ -61,12 +61,16 @@ import eu.etaxonomy.cdm.strategy.cache.common.TimePeriodPartialFormatter;
 public class TimePeriod implements Cloneable, Serializable {
     private static final long serialVersionUID = 3405969418194981401L;
     private static final Logger logger = Logger.getLogger(TimePeriod.class);
-    public static final DateTimeFieldType MONTH_TYPE = DateTimeFieldType.monthOfYear();
     public static final DateTimeFieldType YEAR_TYPE = DateTimeFieldType.year();
+    public static final DateTimeFieldType MONTH_TYPE = DateTimeFieldType.monthOfYear();
     public static final DateTimeFieldType DAY_TYPE = DateTimeFieldType.dayOfMonth();
     public static final DateTimeFieldType HOUR_TYPE = DateTimeFieldType.hourOfDay();
     public static final DateTimeFieldType MINUTE_TYPE = DateTimeFieldType.minuteOfHour();
 
+    public static final Partial CONTINUED = new Partial
+            (new DateTimeFieldType[]{YEAR_TYPE, MONTH_TYPE, DAY_TYPE},
+             new int[]{9999, 11, 30});
+
     @XmlElement(name = "Start")
     @XmlJavaTypeAdapter(value = PartialAdapter.class)
     @Type(type="partialUserType")
@@ -83,7 +87,6 @@ public class TimePeriod implements Cloneable, Serializable {
     @JsonIgnore // currently used for swagger model scanner
     private Partial end;
 
-
     @XmlElement(name = "FreeText")
     private String freeText;
 
@@ -289,8 +292,8 @@ public class TimePeriod implements Cloneable, Serializable {
         start=startDate;
     }
     public TimePeriod(Partial startDate, Partial endDate) {
-        start=startDate;
-        end=endDate;
+        start = startDate;
+        end = endDate;
     }
 
 //******************* GETTER / SETTER ************************************/
@@ -308,7 +311,7 @@ public class TimePeriod implements Cloneable, Serializable {
 
     @JsonIgnore // currently used for swagger model scanner
     public Partial getEnd() {
-        return end;
+        return isContinued() ? null : end;
     }
 
     public void setEnd(Partial end) {
@@ -337,6 +340,29 @@ public class TimePeriod implements Cloneable, Serializable {
     }
 
 
+    /**
+     * Returns the continued flag (internally stored as a constant
+     * far-future date, {@link #CONTINUED}).
+     * @return
+     */
+    public boolean isContinued() {
+        return CONTINUED.equals(end);
+    }
+    /**
+     * Sets the (virtual) continued flag.<BR><BR>
+     * NOTE: setting the flag to true, will remove an
+     * existing end date.
+     * @param isContinued
+     */
+    public void setContinued(boolean isContinued) {
+        if (isContinued == true){
+            this.end = CONTINUED;
+        }else if (isContinued()){
+            this.end = null;
+        }
+    }
+
+
 //******************* Transient METHODS ************************************/
 
     /**
@@ -368,23 +394,6 @@ public class TimePeriod implements Cloneable, Serializable {
     }
 
 
-
-    @Transient
-    public String getYear(){
-        String result = "";
-        if (getStartYear() != null){
-            result += String.valueOf(getStartYear());
-            if (getEndYear() != null){
-                result += "-" + String.valueOf(getEndYear());
-            }
-        }else{
-            if (getEndYear() != null){
-                result += String.valueOf(getEndYear());
-            }
-        }
-        return result;
-    }
-
     @Transient
     public Integer getStartYear(){
         return getPartialValue(start, YEAR_TYPE);
@@ -402,17 +411,17 @@ public class TimePeriod implements Cloneable, Serializable {
 
     @Transient
     public Integer getEndYear(){
-        return getPartialValue(end, YEAR_TYPE);
+        return getPartialValue(getEnd(), YEAR_TYPE);
     }
 
     @Transient
     public Integer getEndMonth(){
-        return getPartialValue(end, MONTH_TYPE);
+        return getPartialValue(getEnd(), MONTH_TYPE);
     }
 
     @Transient
     public Integer getEndDay(){
-        return getPartialValue(end, DAY_TYPE);
+        return getPartialValue(getEnd(), DAY_TYPE);
     }
 
     public TimePeriod setStartYear(Integer year){
@@ -462,7 +471,7 @@ public class TimePeriod implements Cloneable, Serializable {
     @Transient
     private TimePeriod setEndField(Integer value, DateTimeFieldType type)
             throws IndexOutOfBoundsException{
-        end = setPartialField(end, value, type);
+        end = setPartialField(getEnd(), value, type);
         return this;
     }
 
@@ -512,7 +521,6 @@ public class TimePeriod implements Cloneable, Serializable {
     @Override
     public String toString(){
         String result = null;
-//        DateTimeFormatter formatter = TimePeriodPartialFormatter.NewInstance();
         if ( StringUtils.isNotBlank(this.getFreeText())){
             result = this.getFreeText();
         }else{
@@ -523,15 +531,37 @@ public class TimePeriod implements Cloneable, Serializable {
 
     /**
      * Returns the concatenation of <code>start</code> and <code>end</code>
-     *
      */
     public String getTimePeriod(){
         String result = null;
         DateTimeFormatter formatter = TimePeriodPartialFormatter.NewInstance();
         String strStart = start != null ? start.toString(formatter): null;
-        String strEnd = end != null ? end.toString(formatter): null;
-        result = CdmUtils.concat("-", strStart, strEnd);
+        if (isContinued()){
+            result = CdmUtils.concat("", strStart, "+");
+        }else{
+            String strEnd = end != null ? end.toString(formatter): null;
+            result = CdmUtils.concat("-", strStart, strEnd);
+        }
+
+        return result;
+    }
 
+    @Transient
+    public String getYear(){
+        String result = "";
+        if (getStartYear() != null){
+            result += String.valueOf(getStartYear());
+            if (getEndYear() != null){
+                result += "-" + String.valueOf(getEndYear());
+            }
+        }else{
+            if (getEndYear() != null){
+                result += String.valueOf(getEndYear());
+            }
+        }
+        if (isContinued()){
+            result += "+";
+        }
         return result;
     }
 
@@ -571,9 +601,9 @@ public class TimePeriod implements Cloneable, Serializable {
     public int hashCode() {
         int hashCode = 7;
         hashCode = 29*hashCode +
-                    (start== null? 33: start.hashCode()) +
-                    (end== null? 39: end.hashCode()) +
-                    (freeText== null? 41: freeText.hashCode());
+                    (start == null? 33: start.hashCode()) +
+                    (end == null? 39: end.hashCode()) +
+                    (freeText == null? 41: freeText.hashCode());
         return hashCode;
     }
 
@@ -602,5 +632,4 @@ public class TimePeriod implements Cloneable, Serializable {
         target.setFreeText(origin.freeText);
     }
 
-
 }
index 80229ebe84afbad577c6dbe6fcb1ba3b00f68ebe..6ca6a15840ec117ca25a49da3366716dd6d5ce21 100644 (file)
@@ -35,6 +35,19 @@ public class TreeIndex {
     }
 
 
+    /**
+     * @param subtree
+     * @return
+     */
+    public static TreeIndex NewInstance(TaxonNode node) {
+        if (node == null){
+            return null;
+        }else{
+            return new TreeIndex(node.treeIndex());
+        }
+    }
+
+
     /**
      * @param stringList
      * @return
@@ -47,6 +60,8 @@ public class TreeIndex {
         return result;
     }
 
+    //regEx, we also allow the tree itself to have a tree index (e.g. #t1#)
+    //this may change in future as not necessarily needed
     private static String regEx = "#[a-z](\\d+#)+";
     private static Pattern pattern = Pattern.compile(regEx);
 
@@ -88,6 +103,26 @@ public class TreeIndex {
         }
     }
 
+    public boolean isTreeRoot(){
+        int count = 0;
+        for (char c : this.treeIndex.toCharArray()){
+            if (c == '#') {
+                count++;
+            }
+        }
+        return count == 3;
+    }
+
+    public boolean isTree(){
+        int count = 0;
+        for (char c : this.treeIndex.toCharArray()){
+            if (c == '#') {
+                count++;
+            }
+        }
+        return count == 2;
+    }
+
 // ********************** STATIC METHODS  *****************************/
 
     /**
@@ -186,4 +221,6 @@ public class TreeIndex {
         return result;
     }
 
+
+
 }
index 8c1a75f293d79f220888602dd0eaac438afaa6d5..a550de9fec84b572112d558c4da75f167308e2f4 100644 (file)
@@ -17,6 +17,7 @@ import java.util.Set;
 import javax.persistence.Column;
 import javax.persistence.Entity;
 import javax.persistence.FetchType;
+import javax.persistence.Index;
 import javax.persistence.JoinColumn;
 import javax.persistence.JoinTable;
 import javax.persistence.ManyToMany;
@@ -24,6 +25,7 @@ import javax.persistence.ManyToOne;
 import javax.persistence.OneToMany;
 import javax.persistence.OrderBy;
 import javax.persistence.OrderColumn;
+import javax.persistence.Table;
 import javax.persistence.Transient;
 import javax.xml.bind.annotation.XmlAccessType;
 import javax.xml.bind.annotation.XmlAccessorType;
@@ -37,8 +39,6 @@ import javax.xml.bind.annotation.XmlType;
 import org.apache.log4j.Logger;
 import org.hibernate.annotations.Cascade;
 import org.hibernate.annotations.CascadeType;
-import org.hibernate.annotations.Index;
-import org.hibernate.annotations.Table;
 import org.hibernate.envers.Audited;
 
 import eu.etaxonomy.cdm.hibernate.HHH_9751_Util;
@@ -72,7 +72,7 @@ import eu.etaxonomy.cdm.model.common.VersionableEntity;
 @XmlRootElement(name = "FeatureNode")
 @Entity
 @Audited
-@Table(appliesTo="FeatureNode", indexes = { @Index(name = "featureNodeTreeIndex", columnNames = { "treeIndex" }) })
+@Table(name="FeatureNode", indexes = { @Index(name = "featureNodeTreeIndex", columnList = "treeIndex") })
 public class FeatureNode extends VersionableEntity
             implements ITreeNode<FeatureNode>, Cloneable {
        private static final Logger logger = Logger.getLogger(FeatureNode.class);
@@ -589,7 +589,14 @@ public class FeatureNode extends VersionableEntity
        @Override
        public String treeIndex() {
                return this.treeIndex;
-       }
+       }    @Override
+    public String treeIndexLike() {
+        return treeIndex + "%";
+    }
+    @Override
+    public String treeIndexWc() {
+        return treeIndex + "*";
+    }
 
        @Override
        @Deprecated
index efd623f6313ba33a1f8187292dcd9b7385ef4c46..c57c16bac422e16d31245f5d22e4b23226c91363 100644 (file)
@@ -30,7 +30,6 @@ import eu.etaxonomy.cdm.strategy.cache.description.SpecimenDescriptionDefaultCac
  * according to the SDD schema.
  *
  * @author a.mueller
- * @version 1.0
  * @since 08-Jul-2008
  */
 @XmlAccessorType(XmlAccessType.FIELD)
index 60dd4b93a0daa4a555bea510cd5f1a0b9128995a..89274da1a210b3c39eee676a345bcf41b7a91936 100644 (file)
@@ -54,7 +54,6 @@ import eu.etaxonomy.cdm.model.taxon.Taxon;
  * </ul>
  *
  * @author m.doering
- * @version 1.0
  * @since 08-Nov-2007 13:06:57
  */
 @XmlAccessorType(XmlAccessType.FIELD)
index 0f2976a955de475650759fb3c5c19953c9a8eaba..59a22a5f57624db7c3f38651f049cdc68dbf3ce0 100644 (file)
@@ -80,7 +80,7 @@ public class Point implements Cloneable, Serializable {
     @XmlElement(name = "ErrorRadius")
     @Field
     @NumericField
-    private Integer errorRadius = 0;
+    private Integer errorRadius;
 
     @XmlElement(name = "ReferenceSystem")
     @XmlIDREF
index 1c98461c4f922f1f5cddbfb19603ebac4a285b3d..9bae5d39c35e8288a0c2ce03e5cdf05f902f4f0d 100644 (file)
@@ -48,7 +48,6 @@ import eu.etaxonomy.cdm.model.common.VersionableEntity;
  * E.g. a list of jpg files that represent a scanned article of multiple pages.
  *
  * @author m.doering
- * @version 1.0
  * @since 08-Nov-2007 13:06:34
  */
 @XmlAccessorType(XmlAccessType.FIELD)
index 38821825d49cd4fa3366fe82034a9cfc696a2082..578fdfc5f9b2d88501ce48cb3c26ab6e309d350c 100644 (file)
@@ -85,8 +85,11 @@ import eu.etaxonomy.cdm.model.reference.Reference;
 @Entity
 @Audited
 @Configurable
-//@Table(appliesTo="Sequence", indexes = { @Index(name = "sequenceTitleCacheIndex", columnNames = { "titleCache" }) })
-public class Sequence extends AnnotatableEntity implements Cloneable{
+//@Table(name="Sequence", indexes = { @Index(name = "sequenceTitleCacheIndex", columnList = "titleCache") })
+public class Sequence
+               extends AnnotatableEntity
+               implements Cloneable{
+
        private static final long serialVersionUID = 8298983152731241775L;
        private static final Logger logger = Logger.getLogger(Sequence.class);
 
index 1e5c8e2a6dc9553ccc87402a80b83ab59c89789e..8615a4030c28bece518591054bfd12d2bc70ca74 100644 (file)
@@ -34,7 +34,6 @@ import eu.etaxonomy.cdm.model.reference.Reference;
  * One nomenclatural status can be assigned to several taxon names.
  *
  * @author m.doering
- * @version 1.0
  * @since 08-Nov-2007 13:06:39
  */
 @XmlAccessorType(XmlAccessType.FIELD)
index 761a8be536339734712cb48d06015367b7c1120d..900f3a1a7f6d6a7c49ad6c24149406993c047f6b 100644 (file)
@@ -835,7 +835,7 @@ public class NomenclaturalStatusType extends OrderedTermBase<NomenclaturalStatus
                statusAbbreviation = normalizeStatusAbbrev(statusAbbreviation);
 
                //TODO handle undefined names correctly
-               boolean isZooname = name.getNameType().equals(NomenclaturalCode.ICZN);
+               boolean isZooname = name == null? false : name.getNameType().equals(NomenclaturalCode.ICZN);
 
                Map<String, UUID> map = isZooname ? zooAbbrevMap : abbrevMap;
                if (map == null ){
index 03592bd5a8c4ede3195636b2a8c18955a6b87166..6c84e58cc3fd532fa480f2127912969323073a4f 100644 (file)
@@ -28,6 +28,7 @@ import javax.persistence.JoinTable;
 import javax.persistence.ManyToMany;
 import javax.persistence.ManyToOne;
 import javax.persistence.OneToMany;
+import javax.persistence.Table;
 import javax.persistence.Transient;
 import javax.validation.Valid;
 import javax.validation.constraints.Min;
@@ -47,7 +48,6 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
 import org.hibernate.annotations.Cascade;
 import org.hibernate.annotations.CascadeType;
-import org.hibernate.annotations.Table;
 import org.hibernate.annotations.Type;
 import org.hibernate.envers.Audited;
 import org.hibernate.search.annotations.Analyze;
@@ -179,9 +179,9 @@ import eu.etaxonomy.cdm.validation.annotation.ValidTaxonomicYear;
 @Entity
 @Audited
 @Inheritance(strategy=InheritanceType.SINGLE_TABLE)
-@Table(appliesTo="TaxonName", indexes = {
-        @org.hibernate.annotations.Index(name = "taxonNameBaseTitleCacheIndex", columnNames = { "titleCache" }),
-        @org.hibernate.annotations.Index(name = "taxonNameBaseNameCacheIndex", columnNames = { "nameCache" }) })
+@Table(name="TaxonName", indexes = {
+        @javax.persistence.Index(name = "taxonNameBaseTitleCacheIndex", columnList = "titleCache"),
+        @javax.persistence.Index(name = "taxonNameBaseNameCacheIndex", columnList = "nameCache") })
 @NameMustFollowCode
 @CorrectEpithetsForRank(groups = Level2.class)
 @NameMustHaveAuthority(groups = Level2.class)
index 3de465ce44e04b7703b0dfa6a459b20ed2110042..d262bdbfde69c3fe07ffd4c8e2838711c042aa5b 100644 (file)
@@ -14,6 +14,7 @@ import javax.persistence.Column;
 import javax.persistence.Entity;
 import javax.persistence.FetchType;
 import javax.persistence.ManyToOne;
+import javax.persistence.Table;
 import javax.xml.bind.annotation.XmlAccessType;
 import javax.xml.bind.annotation.XmlAccessorType;
 import javax.xml.bind.annotation.XmlElement;
@@ -26,7 +27,6 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
 import org.hibernate.annotations.Cascade;
 import org.hibernate.annotations.CascadeType;
-import org.hibernate.annotations.Table;
 import org.hibernate.envers.Audited;
 import org.hibernate.search.annotations.Analyze;
 import org.hibernate.search.annotations.Field;
@@ -62,9 +62,12 @@ import eu.etaxonomy.cdm.strategy.cache.occurrence.CollectionDefaultCacheStrategy
 //@Indexed(index = "eu.etaxonomy.cdm.model.occurrence.Collection")
 @Audited
 @Configurable
-@Table(appliesTo="Collection", indexes = { @org.hibernate.annotations.Index(name = "collectionTitleCacheIndex", columnNames = { "titleCache" }) })
-public class Collection extends IdentifiableMediaEntity<IIdentifiableEntityCacheStrategy<Collection>> implements Cloneable{
-       private static final long serialVersionUID = -7833674897174732255L;
+@Table(name="Collection", indexes = { @javax.persistence.Index(name = "collectionTitleCacheIndex", columnList = "titleCache") })
+public class Collection
+        extends IdentifiableMediaEntity<IIdentifiableEntityCacheStrategy<Collection>>
+        implements Cloneable{
+
+    private static final long serialVersionUID = -7833674897174732255L;
        private static final Logger logger = Logger.getLogger(Collection.class);
 
        @XmlElement(name = "Code")
index 16db5c1ea2b3b8b5f4efe4edb5417242e2b2efe8..1752d171ebdb565f9e387b71917228c89d121d46 100644 (file)
@@ -20,12 +20,14 @@ import java.util.Set;
 import javax.persistence.Column;
 import javax.persistence.Entity;
 import javax.persistence.FetchType;
+import javax.persistence.Index;
 import javax.persistence.Inheritance;
 import javax.persistence.InheritanceType;
 import javax.persistence.ManyToMany;
 import javax.persistence.ManyToOne;
 import javax.persistence.MapKeyJoinColumn;
 import javax.persistence.OneToMany;
+import javax.persistence.Table;
 import javax.persistence.Transient;
 import javax.validation.constraints.NotNull;
 import javax.xml.bind.annotation.XmlAccessType;
@@ -42,8 +44,6 @@ import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
 import org.apache.log4j.Logger;
 import org.hibernate.annotations.Cascade;
 import org.hibernate.annotations.CascadeType;
-import org.hibernate.annotations.Index;
-import org.hibernate.annotations.Table;
 import org.hibernate.annotations.Type;
 import org.hibernate.envers.Audited;
 import org.hibernate.search.annotations.Analyze;
@@ -104,11 +104,12 @@ import eu.etaxonomy.cdm.strategy.match.MatchMode;
 @Entity
 @Audited
 @Inheritance(strategy=InheritanceType.SINGLE_TABLE)
-@Table(appliesTo="SpecimenOrObservationBase", indexes = { @Index(name = "specimenOrObservationBaseTitleCacheIndex", columnNames = { "titleCache" }),
-        @Index(name = "specimenOrObservationBaseIdentityCacheIndex", columnNames = { "identityCache" }) })
+@Table(name="SpecimenOrObservationBase", indexes = { @Index(name = "specimenOrObservationBaseTitleCacheIndex", columnList = "titleCache"),
+        @Index(name = "specimenOrObservationBaseIdentityCacheIndex", columnList = "identityCache") })
 public abstract class SpecimenOrObservationBase<S extends IIdentifiableEntityCacheStrategy<?>>
                 extends IdentifiableEntity<S>
                 implements IMultiLanguageTextHolder, IIntextReferenceTarget, IDescribable<DescriptionBase<S>>, IPublishable  {
+
     private static final long serialVersionUID = 6932680139334408031L;
     private static final Logger logger = Logger.getLogger(SpecimenOrObservationBase.class);
 
index 342824220e8056e5dd0c8da765d2472a987722cf..560b2be015619d586197e9888a2a2a625c2c6b4f 100644 (file)
@@ -23,6 +23,7 @@ import javax.persistence.Inheritance;
 import javax.persistence.InheritanceType;
 import javax.persistence.Lob;
 import javax.persistence.ManyToOne;
+import javax.persistence.Table;
 import javax.persistence.Transient;
 import javax.validation.constraints.NotNull;
 import javax.validation.constraints.Pattern;
@@ -41,7 +42,6 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
 import org.hibernate.annotations.Cascade;
 import org.hibernate.annotations.CascadeType;
-import org.hibernate.annotations.Table;
 import org.hibernate.annotations.Type;
 import org.hibernate.envers.Audited;
 import org.hibernate.search.annotations.Analyze;
@@ -60,6 +60,7 @@ import eu.etaxonomy.cdm.model.common.IIntextReferenceTarget;
 import eu.etaxonomy.cdm.model.common.TimePeriod;
 import eu.etaxonomy.cdm.model.common.VerbatimTimePeriod;
 import eu.etaxonomy.cdm.model.media.IdentifiableMediaEntity;
+import eu.etaxonomy.cdm.model.name.TaxonName;
 import eu.etaxonomy.cdm.strategy.cache.reference.DefaultReferenceCacheStrategy;
 import eu.etaxonomy.cdm.strategy.cache.reference.INomenclaturalReferenceCacheStrategy;
 import eu.etaxonomy.cdm.strategy.match.Match;
@@ -124,7 +125,7 @@ import eu.etaxonomy.cdm.validation.annotation.ReferenceCheck;
 @Entity
 @Inheritance(strategy=InheritanceType.SINGLE_TABLE)
 @Audited
-@Table(appliesTo="Reference", indexes = { @org.hibernate.annotations.Index(name = "ReferenceTitleCacheIndex", columnNames = { "titleCache" }) })
+@Table(name="Reference", indexes = { @javax.persistence.Index(name = "ReferenceTitleCacheIndex", columnList = "titleCache") })
 //@InReference(groups=Level3.class)
 @ReferenceCheck(groups=Level2.class)
 @InReference(groups=Level3.class)
index 00f4ba2fbddb4507f9aafab8c341855b067ca831..507d8cd2ec9a914eed104e9fb96864e441928d0a 100644 (file)
@@ -13,6 +13,7 @@ package eu.etaxonomy.cdm.model.taxon;
 import java.lang.reflect.Field;
 import java.util.ArrayList;
 import java.util.Collections;
+import java.util.Comparator;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
@@ -854,6 +855,31 @@ public class Taxon
         return count;
     }
 
+    /**
+     * Returns the boolean value indicating whether <i>this</i> taxon is a invalid designation
+     * for at least one other taxon.
+     */
+    // TODO cache as for #hasTaxonomicChildren
+    @Transient
+    public boolean isInvalidDesignation(){
+        return computeInvalidDesignationRelations() > 0;
+    }
+
+    /**
+     * Counts the number of invalid designation relationships where this taxon represents the
+     * invalid designation for another taxon.
+     * @return
+     */
+    private int computeInvalidDesignationRelations(){
+        int count = 0;
+        for (TaxonRelationship rel: this.getRelationsFromThisTaxon()){
+            if (rel.getType().isInvalidDesignation()){
+                count++;
+            }
+        }
+        return count;
+    }
+
     /**
      * Returns the boolean value indicating whether <i>this</i> taxon is a related
      * concept for at least one other taxon.
@@ -1553,6 +1579,88 @@ public class Taxon
         return result;
     }
 
+    /**
+     * @param comparator
+     * @return
+     *
+     * @see     #getSynonymsGroups()
+     */
+    @Transient
+    public List<Taxon> getAllMisappliedNames(){
+        List<Taxon> result = new ArrayList<>();
+
+        for (TaxonRelationship rel : this.getRelationsToThisTaxon()){
+            if (rel.getType().isAnyMisappliedName() ){
+                result.add(rel.getFromTaxon());
+            }
+        }
+        sortBySimpleTitleCacheComparator(result);
+        return result;
+    }
+
+    /**
+     * @param comparator
+     * @return
+     *
+     * @see     #getSynonymsGroups()
+     */
+    @Transient
+    public List<Taxon> getInvalidDesignations(){
+        List<Taxon> result = new ArrayList<>();
+        for (TaxonRelationship rel : this.getRelationsToThisTaxon()){
+            if (rel.getType().isInvalidDesignation()){
+                result.add(rel.getFromTaxon());
+            }
+        }
+        sortBySimpleTitleCacheComparator(result);
+        return result;
+    }
+
+    /**
+     * @param comparator
+     * @return
+     *
+     * @see     #getSynonymsGroups()
+     */
+    @Transient
+    public List<Taxon> getAllProParteSynonyms(){
+        List<Taxon> result = new ArrayList<>();
+
+        for (TaxonRelationship rel : this.getRelationsToThisTaxon()){
+            if (rel.getType().isAnySynonym()){
+                result.add(rel.getFromTaxon());
+            }
+        }
+        sortBySimpleTitleCacheComparator(result);
+        return result;
+    }
+    /**
+     * @param result
+     */
+    private void sortBySimpleTitleCacheComparator(List<Taxon> result) {
+
+        Comparator<Taxon> taxonComparator = new Comparator<Taxon>(){
+
+            @Override
+            public int compare(Taxon o1, Taxon o2) {
+
+                if (o1.getTitleCache() == o2.getTitleCache()){
+                    return 0;
+                }
+                if (o1.getTitleCache() == null){
+                    return -1;
+                }
+                if (o2.getTitleCache() == null){
+                    return 1;
+                }
+                return o1.getTitleCache().compareTo(o2.getTitleCache());
+
+            }
+
+        };
+        Collections.sort(result, taxonComparator);
+    }
+
 
     /**
      * Returns the image gallery description. If no image gallery exists, a new one is created using the
index 58422ea1eaef0279184fb2c7afd3862e813cc249..dff388a184c8d7c00e5dfd0053baf9f01fd0e666 100644 (file)
@@ -15,7 +15,9 @@ import java.util.List;
 import javax.persistence.Column;
 import javax.persistence.Entity;
 import javax.persistence.FetchType;
+import javax.persistence.Index;
 import javax.persistence.ManyToOne;
+import javax.persistence.Table;
 import javax.persistence.Transient;
 import javax.validation.constraints.NotNull;
 import javax.xml.bind.annotation.XmlAccessType;
@@ -29,8 +31,6 @@ import javax.xml.bind.annotation.XmlType;
 import org.apache.log4j.Logger;
 import org.hibernate.annotations.Cascade;
 import org.hibernate.annotations.CascadeType;
-import org.hibernate.annotations.Index;
-import org.hibernate.annotations.Table;
 import org.hibernate.envers.Audited;
 import org.hibernate.search.annotations.Analyze;
 import org.hibernate.search.annotations.ClassBridge;
@@ -90,14 +90,14 @@ import eu.etaxonomy.cdm.validation.annotation.TaxonNameCannotBeAcceptedAndSynony
 @Entity
 @Audited
 //@PreFilter("hasPermission(filterObject, 'edit')")
-@Table(appliesTo="TaxonBase", indexes = { @Index(name = "taxonBaseTitleCacheIndex", columnNames = { "titleCache" }) })
+@Table(name="TaxonBase", indexes = { @Index(name = "taxonBaseTitleCacheIndex", columnList = "titleCache") })
 @TaxonNameCannotBeAcceptedAndSynonym(groups = Level3.class)
 @ClassBridges({
     @ClassBridge(name="classInfo",
             index = org.hibernate.search.annotations.Index.YES,
             store = Store.YES,
             impl = ClassInfoBridge.class),
-    @ClassBridge(name="accTaxon", // TODO rename to acceptedTaxon, since we are usually not using abbreviations for field names, see also ACC_TAXON_BRIDGE_PREFIX
+    @ClassBridge(name=AcceptedTaxonBridge.ACC_TAXON, // TODO rename to acceptedTaxon, since we are usually not using abbreviations for field names, see also ACC_TAXON_BRIDGE_PREFIX
             index = org.hibernate.search.annotations.Index.YES,
             store = Store.YES,
             impl = AcceptedTaxonBridge.class),
index cdfd0ccef4878e2183b6fd476358228513229d5f..ad0cae80436ee80df97ab6ae654b5dd787ac85ac 100644 (file)
@@ -25,6 +25,7 @@ import javax.persistence.MapKeyJoinColumn;
 import javax.persistence.OneToMany;
 import javax.persistence.OrderBy;
 import javax.persistence.OrderColumn;
+import javax.persistence.Table;
 import javax.persistence.Transient;
 import javax.xml.bind.annotation.XmlAccessType;
 import javax.xml.bind.annotation.XmlAccessorType;
@@ -41,11 +42,13 @@ import org.apache.log4j.Logger;
 import org.hibernate.LazyInitializationException;
 import org.hibernate.annotations.Cascade;
 import org.hibernate.annotations.CascadeType;
-import org.hibernate.annotations.Index;
-import org.hibernate.annotations.Table;
 import org.hibernate.envers.Audited;
+import org.hibernate.search.annotations.Analyze;
 import org.hibernate.search.annotations.ContainedIn;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
 import org.hibernate.search.annotations.IndexedEmbedded;
+import org.hibernate.search.annotations.Store;
 
 import eu.etaxonomy.cdm.hibernate.HHH_9751_Util;
 import eu.etaxonomy.cdm.hibernate.HibernateProxyHelper;
@@ -90,11 +93,14 @@ import eu.etaxonomy.cdm.validation.annotation.ChildTaxaMustNotSkipRanks;
 //@Indexed disabled to reduce clutter in indexes, since this type is not used by any search
 //@Indexed(index = "eu.etaxonomy.cdm.model.taxon.TaxonNode")
 @Audited
-@Table(appliesTo="TaxonNode", indexes = { @Index(name = "taxonNodeTreeIndex", columnNames = { "treeIndex" }) })
+@Table(name="TaxonNode", indexes = { @javax.persistence.Index(name = "taxonNodeTreeIndex", columnList = "treeIndex") })
 @ChildTaxaMustBeLowerRankThanParent(groups = Level3.class)
 @ChildTaxaMustNotSkipRanks(groups = Level3.class)
 @ChildTaxaMustDeriveNameFromParent(groups = Level3.class)
-public class TaxonNode extends AnnotatableEntity implements ITaxonTreeNode, ITreeNode<TaxonNode>, Cloneable{
+public class TaxonNode
+            extends AnnotatableEntity
+            implements ITaxonTreeNode, ITreeNode<TaxonNode>, Cloneable{
+
     private static final long serialVersionUID = -4743289894926587693L;
     private static final Logger logger = Logger.getLogger(TaxonNode.class);
 
@@ -117,6 +123,7 @@ public class TaxonNode extends AnnotatableEntity implements ITaxonTreeNode, ITre
 
     @XmlElement(name = "treeIndex")
     @Column(length=255)
+    @Field(store = Store.YES, index = Index.YES, analyze = Analyze.NO)
     private String treeIndex;
 
 
@@ -449,6 +456,14 @@ public class TaxonNode extends AnnotatableEntity implements ITaxonTreeNode, ITre
     public void setTreeIndex(String treeIndex) {
         this.treeIndex = treeIndex;
     }
+    @Override
+    public String treeIndexLike() {
+        return treeIndex + "%";
+    }
+    @Override
+    public String treeIndexWc() {
+        return treeIndex + "*";
+    }
 
 
 
@@ -944,40 +959,34 @@ public class TaxonNode extends AnnotatableEntity implements ITaxonTreeNode, ITre
     }
 
     /**
-     * Whether this TaxonNode is a descendant of the given TaxonNode
-     *
-     * Caution: use this method with care on big branches. -> performance and memory hungry
-     *
-     * Protip: Try solving your problem with the isAscendant method which traverses the tree in the
-     * other direction (up). It will always result in a rather small set of consecutive parents beeing
-     * generated.
-     *
-     * TODO implement more efficiently without generating the set of descendants first
+     * Whether this TaxonNode is a descendant of (or equal to) the given TaxonNode
      *
      * @param possibleParent
-     * @return true if this is a descendant
+     * @return <code>true</code> if <b>this</b> is a descendant
      */
     @Transient
     public boolean isDescendant(TaxonNode possibleParent){
-       if (this.treeIndex() == null || possibleParent.treeIndex() == null) {
+       if (possibleParent == null || this.treeIndex() == null
+               || possibleParent.treeIndex() == null) {
                return false;
        }
-       return possibleParent == null ? false : this.treeIndex().startsWith(possibleParent.treeIndex() );
+       return this.treeIndex().startsWith(possibleParent.treeIndex() );
     }
 
     /**
-     * Whether this TaxonNode is an ascendant of the given TaxonNode
+     * Whether this TaxonNode is an ascendant of (or equal to) the given TaxonNode.
+     *
      *
      * @param possibleChild
-     * @return true if there are ascendants
+     * @return <code>true</code> if <b>this</b> is a ancestor of the given child parameter
      */
     @Transient
     public boolean isAncestor(TaxonNode possibleChild){
-       if (this.treeIndex() == null || possibleChild.treeIndex() == null) {
+       if (possibleChild == null || this.treeIndex() == null || possibleChild.treeIndex() == null) {
                return false;
        }
        // return possibleChild == null ? false : possibleChild.getAncestors().contains(this);
-        return possibleChild == null ? false : possibleChild.treeIndex().startsWith(this.treeIndex());
+        return  possibleChild.treeIndex().startsWith(this.treeIndex());
     }
 
     /**
@@ -991,7 +1000,6 @@ public class TaxonNode extends AnnotatableEntity implements ITaxonTreeNode, ITre
         return childNodes.size() > 0;
     }
 
-
     public boolean hasTaxon() {
         return (taxon!= null);
     }
index aa0a5a4e07847928e4dccfef3fffd77357e42f11..3307c844ae1de67ed602a747426c553d79a57fe0 100644 (file)
@@ -171,7 +171,20 @@ public class TaxonRelationshipType extends RelationshipTermBase<TaxonRelationshi
        public boolean isMisappliedNameOrInvalidDesignation(){
         if (this.isAnyMisappliedName()){
             return true;
-        }else if (this.equals(INVALID_DESIGNATION_FOR())){
+        }else if (isInvalidDesignation()){
+            return true;
+        }
+        return false;
+    }
+
+    /**
+     * <code>true</code> if this relationship type is an
+     * {@link #INVALID_DESIGNATION_FOR() invalid designation}
+     *
+     * @see #isAnyMisappliedName()()
+     */
+    public boolean isInvalidDesignation(){
+        if (this.equals(INVALID_DESIGNATION_FOR())){
             return true;
         }
         return false;
index 8b23bd8883c64d879fe416f019d902225a4ae42b..d5c0c6befe92045a9c2e8e757e54dde3f2ca718f 100644 (file)
@@ -8,6 +8,7 @@
 */
 package eu.etaxonomy.cdm.strategy.cache.reference;
 
+import java.util.List;
 import java.util.UUID;
 
 import org.apache.commons.lang.StringUtils;
@@ -17,6 +18,9 @@ import org.joda.time.format.DateTimeFormat;
 import org.joda.time.format.DateTimeFormatter;
 
 import eu.etaxonomy.cdm.common.CdmUtils;
+import eu.etaxonomy.cdm.hibernate.HibernateProxyHelper;
+import eu.etaxonomy.cdm.model.agent.Person;
+import eu.etaxonomy.cdm.model.agent.Team;
 import eu.etaxonomy.cdm.model.agent.TeamOrPersonBase;
 import eu.etaxonomy.cdm.model.common.CdmBase;
 import eu.etaxonomy.cdm.model.common.VerbatimTimePeriod;
@@ -275,6 +279,71 @@ public class DefaultReferenceCacheStrategy extends StrategyBase implements INome
         return stringBuilder.toString();
     }
 
+    /**
+     * @param reference
+     * @return
+     */
+    public String createShortCitation(Reference reference) {
+        TeamOrPersonBase<?> authorship = reference.getAuthorship();
+        String shortCitation = "";
+        if (authorship == null) {
+            return null;
+        }
+        authorship = HibernateProxyHelper.deproxy(authorship);
+        if (authorship instanceof Person){
+            shortCitation = ((Person)authorship).getFamilyName();
+            if (StringUtils.isBlank(shortCitation) ){
+                shortCitation = ((Person)authorship).getTitleCache();
+            }
+        }
+        else if (authorship instanceof Team){
+
+            Team authorTeam = HibernateProxyHelper.deproxy(authorship, Team.class);
+            int index = 0;
+
+            for (Person teamMember : authorTeam.getTeamMembers()){
+                index++;
+                if (index == 3){
+                    shortCitation += " & al.";
+                    break;
+                }
+                String concat = concatString(authorTeam, authorTeam.getTeamMembers(), index, ", ", " & ");
+                if (teamMember.getFamilyName() != null){
+                    shortCitation += concat + teamMember.getFamilyName();
+                }else{
+                    shortCitation += concat + teamMember.getTitleCache();
+                }
+
+            }
+            if (StringUtils.isBlank(shortCitation)){
+                shortCitation = authorTeam.getTitleCache();
+            }
+
+        }
+        if (reference.getDatePublished() != null) {
+            if (!StringUtils.isBlank(reference.getDatePublished().getFreeText())){
+                shortCitation = shortCitation + " (" + reference.getDatePublished().getFreeText() + ")";
+            }else if (!StringUtils.isBlank(reference.getYear()) ){
+                shortCitation = shortCitation + " (" + reference.getYear() + ")";
+            }
+        }
+
+        return shortCitation;
+    }
+
+    private static String concatString(Team team, List<Person> teamMembers, int i, String std_team_concatination, String final_team_concatination) {
+        String concat;
+        if (i <= 1){
+            concat = "";
+        }else if (i < teamMembers.size() || ( team.isHasMoreMembers() && i == teamMembers.size())){
+            concat = std_team_concatination;
+        }else{
+            concat = final_team_concatination;
+        }
+        return concat;
+    }
+
+
     @Override
     public String getNomenclaturalCache(Reference reference) {
         return this.getNomenclaturalCitation(reference, null);
index b3327d200a2be3ba65992dd2fcb2329cbc2ca782..1c0c9461a68af2ce0925759c671118691faa4a80 100644 (file)
@@ -18,6 +18,7 @@ import org.apache.commons.lang.StringUtils;
 import eu.etaxonomy.cdm.hibernate.HibernateProxyHelper;
 import eu.etaxonomy.cdm.model.agent.Person;
 import eu.etaxonomy.cdm.model.agent.Team;
+import eu.etaxonomy.cdm.model.common.CdmBase;
 import eu.etaxonomy.cdm.model.name.TaxonName;
 import eu.etaxonomy.cdm.model.reference.Reference;
 import eu.etaxonomy.cdm.model.taxon.Synonym;
@@ -75,7 +76,7 @@ public class TaxonBaseShortSecCacheStrategy<T extends TaxonBase>
                        if (sec.getAuthorship() != null){
 
                                if (sec.getAuthorship().isInstanceOf(Team.class)){
-                                       Team authorTeam = HibernateProxyHelper.deproxy(sec.getAuthorship(), Team.class);
+                                       Team authorTeam = CdmBase.deproxy(sec.getAuthorship(), Team.class);
                                        if (authorTeam.getTeamMembers().size() > 2){
                                                if (authorTeam.getTeamMembers().get(0).getFamilyName() != null){
                                                result = authorTeam.getTeamMembers().get(0).getFamilyName() + " & al.";
index 06b259d93784332719e7eaf941d68c21da13371c..cdaddf528dc59097333bf1915c9b3ac6877f69a1 100644 (file)
@@ -45,16 +45,16 @@ public class TimePeriodParser {
        //case fl. 1806 or c. 1806 or fl. 1806?
        private static final Pattern prefixedYearPattern =  Pattern.compile("(fl|c)\\.\\s*\\d{4}(\\s*-\\s*\\d{4})?\\??");
        //standard
-       private static final Pattern standardPattern =  Pattern.compile("\\s*\\d{2,4}(\\s*-(\\s*\\d{2,4})?)?");
+       private static final Pattern standardPattern =  Pattern.compile("\\s*\\d{2,4}(\\s*-(\\s*\\d{2,4})?|\\+)?");
        private static final String strDotDate = "[0-3]?\\d\\.[01]?\\d\\.\\d{4,4}";
-       private static final String strDotDatePeriodPattern = String.format("%s(\\s*-\\s*%s?)?", strDotDate, strDotDate);
+       private static final String strDotDatePeriodPattern = String.format("%s(\\s*-\\s*%s|\\+)?", strDotDate, strDotDate);
        private static final Pattern dotDatePattern =  Pattern.compile(strDotDatePeriodPattern);
        private static final String strSlashDate = "[0-3]?\\d\\/[01]?\\d\\/\\d{4,4}";
-       private static final String strSlashDatePeriodPattern = String.format("%s(\\s*-\\s*%s?)?", strSlashDate, strSlashDate);
+       private static final String strSlashDatePeriodPattern = String.format("%s(\\s*-\\s*%s|\\+)?", strSlashDate, strSlashDate);
        private static final Pattern slashDatePattern =  Pattern.compile(strSlashDatePeriodPattern);
        private static final Pattern lifeSpanPattern =  Pattern.compile(String.format("%s--%s", firstYearPattern, firstYearPattern));
        private static final String strMonthes = "((Jan|Feb|Aug|Sept?|Oct(ober)?|Nov|Dec)\\.?|(Mar(ch)?|Apr(il)?|May|June?|July?))";
-       private static final String strDateWithMonthes = "([0-3]?\\d" + dotOrWs + ")?" + strMonthes + dotOrWs + "\\d{4,4}";
+       private static final String strDateWithMonthes = "([0-3]?\\d" + dotOrWs + ")?" + strMonthes + dotOrWs + "\\d{4,4}\\+?";
        private static final Pattern dateWithMonthNamePattern = Pattern.compile(strDateWithMonthes);
 
        public static <T extends TimePeriod> T parseString(T timePeriod, String periodString){
@@ -72,14 +72,14 @@ public class TimePeriodParser {
 
                result.setFreeText(null);
 
-               //case "1806"[1807];
+               //case "1806"[1807];  => TODO this should (and is?) handled in parse verbatim, should be removed here
                if (uncorrectYearPatter.matcher(periodString).matches()){
                        result.setFreeText(periodString);
                        String realYear = periodString.split("\\[")[1];
                        realYear = realYear.replace("]", "");
                        result.setStartYear(Integer.valueOf(realYear));
                        result.setFreeText(periodString);
-               //case fl. 1806 or c. 1806 or fl. 1806?
+               //case fl. 1806 or c. 1806 or fl. 1806?  => TODO questionable if this should really be handled here, fl. probably stands for flowering and is not part of the date but of the date  context. What stands "c." for? Used by Markup import?
                }else if(prefixedYearPattern.matcher(periodString).matches()){
                        result.setFreeText(periodString);
                        Matcher yearMatcher = firstYearPattern.matcher(periodString);
@@ -199,13 +199,14 @@ public class TimePeriodParser {
             result.setFreeText(periodString);
         }else {
             try {
+                dtEnd = handleContinued(dates, dtEnd);
                 //start
-                if (! StringUtils.isBlank(dates[0])){
+                if (isNotBlank(dates[0])){
                     dtStart = parseSingleSlashDate(dates[0].trim());
                 }
 
                 //end
-                if (dates.length >= 2 && ! StringUtils.isBlank(dates[1])){
+                if (dates.length >= 2 && isNotBlank(dates[1])){
                     dtEnd = parseSingleSlashDate(dates[1].trim());
                 }
 
@@ -216,7 +217,6 @@ public class TimePeriodParser {
                 result.setFreeText(periodString);
             }
         }
-
     }
 
 
@@ -234,9 +234,10 @@ public class TimePeriodParser {
                        result.setFreeText(periodString);
                }else {
                        try {
+                           dtEnd = handleContinued(dates, dtEnd);
                                //start
                                if (! StringUtils.isBlank(dates[0])){
-                                       dtStart = parseSingleDotDate(dates[0].trim());
+                                   dtStart = parseSingleDotDate(dates[0].trim());
                                }
 
                                //end
@@ -253,6 +254,21 @@ public class TimePeriodParser {
                }
        }
 
+    /**
+     * Checks if dates is a "continued" date (e.g. 2017+).
+     * If yes, dtEnd is returned as {@link TimePeriod#CONTINUED} and dates[0] is shortened by "+".
+     * @param dates
+     * @param dtEnd
+     * @return
+     */
+    protected static Partial handleContinued(String[] dates, Partial dtEnd) {
+        if (dates.length == 1 && dates[0].endsWith("+") && dates[0].length()>1){
+            dates[0] = dates[0].substring(0, dates[0].length()-1).trim();
+            dtEnd = TimePeriod.CONTINUED;
+        }
+        return dtEnd;
+    }
+
 
     /**
      * @param dateString
@@ -261,8 +277,6 @@ public class TimePeriodParser {
     private static void parseDateWithMonthName(String dateString, TimePeriod result) {
         String[] dates = dateString.split("(\\.|\\s+)+");
 
-
-
         if (dates.length > 3 || dates.length < 2){
             logger.warn("Not 2 or 3 date parts in date string: " + dateString);
             result.setFreeText(dateString);
@@ -272,6 +286,10 @@ public class TimePeriodParser {
             String strMonth = hasNoDay? dates[0] : dates[1];
             String strDay = hasNoDay? null : dates[0];
             try {
+                if (strYear.endsWith("+")){
+                    strYear = strYear.substring(0, strYear.length()-1).trim();
+                    result.setContinued(true);
+                }
                 Integer year = Integer.valueOf(strYear.trim());
                 Integer month = monthNrFormName(strMonth.trim());
                 Integer day = strDay == null ? null : Integer.valueOf(strDay.trim());
@@ -350,7 +368,7 @@ public class TimePeriodParser {
 
     }
 
-
+    //TODO "continued" not yet handled, probably looks different here (e.g. 2017--x)
     private static void parseLifeSpanPattern(String periodString, TimePeriod result) {
 
                try{
@@ -381,6 +399,7 @@ public class TimePeriodParser {
                        logger.warn("More than 1 '-' in period String: " + periodString);
                }else {
                        try {
+                           dtEnd = handleContinued(years, dtEnd);
                                //start
                                if (! StringUtils.isBlank(years[0])){
                                        dtStart = parseSingleDate(years[0].trim());
@@ -607,8 +626,13 @@ public class TimePeriodParser {
      * @return the parsed period
      */
     private static TimePeriod parseEnglishDate(String strFrom, String strTo, boolean isAmerican) {
-        Partial dateFrom = parseSingleEnglishDate(strFrom, isAmerican);
         Partial dateTo = parseSingleEnglishDate(strTo, isAmerican);
+        if (strFrom.endsWith("+") && dateTo == null){
+            dateTo = TimePeriod.CONTINUED;
+            strFrom = strFrom.substring(0, strFrom.length()-1).trim();
+        }
+
+        Partial dateFrom = parseSingleEnglishDate(strFrom, isAmerican);
         TimePeriod result = TimePeriod.NewInstance(dateFrom, dateTo);
         return result;
     }
@@ -643,4 +667,12 @@ public class TimePeriodParser {
         return result;
     }
 
+
+    private static boolean isBlank(String str){
+        return StringUtils.isBlank(str);
+    }
+    private static boolean isNotBlank(String str){
+        return StringUtils.isNotBlank(str);
+    }
+
 }
index d2ec4cee00b323123ef537812f54329ea918ead7..b4bb8840705e9ecd58abf8b1baa289fe86611d28 100644 (file)
@@ -17,6 +17,7 @@ import org.junit.Test;
 
 import eu.etaxonomy.cdm.format.taxon.TaxonRelationshipFormatter;
 import eu.etaxonomy.cdm.model.agent.Person;
+import eu.etaxonomy.cdm.model.agent.Team;
 import eu.etaxonomy.cdm.model.common.DefaultTermInitializer;
 import eu.etaxonomy.cdm.model.common.Language;
 import eu.etaxonomy.cdm.model.common.VerbatimTimePeriod;
@@ -39,6 +40,8 @@ import eu.etaxonomy.cdm.strategy.cache.TaggedText;
  */
 public class TaxonRelationshipFormatterTest {
 
+    private static boolean WITHOUT_NAME = true;
+
     private TaxonRelationship taxonRel;
     private Reference relSec;
 
@@ -53,7 +56,10 @@ public class TaxonRelationshipFormatterTest {
     private TaxonRelationshipFormatter formatter;
     private boolean reverse;
 
-    Person toNameAuthor;
+    private Person toNameAuthor;
+    private Person macFarlane;
+    private Person cheek;
+    private Person toSecAuthor;
     private List<Language> languages;
 
 
@@ -85,18 +91,21 @@ public class TaxonRelationshipFormatterTest {
         fromSec = ReferenceFactory.newGeneric();
         fromSec.setTitle("From Sec");
         String initials = "J.M.";
-        fromSec.setAuthorship(Person.NewInstance(null, "Macfarlane", initials, null));
+        macFarlane = Person.NewInstance(null, "Macfarlane", initials, null);
+        fromSec.setAuthorship(macFarlane);
         fromSec.setDatePublished(VerbatimTimePeriod.NewVerbatimInstance(1918));
 
         relSec = ReferenceFactory.newGeneric();
         relSec.setTitle("From rel reference");
         initials = null; //"M.R.";
-        relSec.setAuthorship(Person.NewInstance(null, "Cheek", initials, null));
+        cheek = Person.NewInstance(null, "Cheek", initials, null);
+        relSec.setAuthorship(cheek);
         relSec.setDatePublished(VerbatimTimePeriod.NewVerbatimInstance(1919));
 
         toSec = ReferenceFactory.newGeneric();
         toSec.setTitle("To Sec");
-        toSec.setAuthorship(Person.NewTitledInstance("ToSecAuthor"));
+        toSecAuthor = Person.NewTitledInstance("ToSecAuthor");
+        toSec.setAuthorship(toSecAuthor);
         toSec.setDatePublished(VerbatimTimePeriod.NewVerbatimInstance(1928));
 
         fromTaxon = Taxon.NewInstance(fromName, fromSec);
@@ -150,7 +159,7 @@ public class TaxonRelationshipFormatterTest {
         tags = formatter.getTaggedText(taxonRel, reverse, languages);
         str = TaggedCacheHelper.createString(tags);
         System.out.println(str);
-        Assert.assertEquals(inverseSymbol + " ?\"Abies alba\" auct., err. sec. Cheek 1919: 123", str);
+        Assert.assertEquals(inverseSymbol + " ?\u202F\"Abies alba\" auct., err. sec. Cheek 1919: 123", str);
 
     }
 
@@ -192,7 +201,7 @@ public class TaxonRelationshipFormatterTest {
         toTaxon.setAppendedPhrase("");
         tags = formatter.getTaggedText(taxonRel, reverse, languages);
         str = TaggedCacheHelper.createString(tags);
-        Assert.assertEquals(SYMBOL + " ?Pinus pinova Mill. sec. ???, rel. sec. Cheek 1919: 123", str);
+        Assert.assertEquals("?" + SYMBOL + " Pinus pinova Mill. sec. ???, rel. sec. Cheek 1919: 123", str);
 
     }
 
@@ -232,4 +241,112 @@ public class TaxonRelationshipFormatterTest {
 
     }
 
+    @Test
+    public void testGetFamilyNames() {
+
+        //Test start condition with single person
+        List<TaggedText> tags = formatter.getTaggedText(taxonRel, reverse, languages);
+        String str = TaggedCacheHelper.createString(tags);
+        Assert.assertFalse("Formatted text should not contain the team correctly formatted", str.contains("Macfarlane & Cheek"));
+
+        //use team
+        Team secRelTeam = Team.NewInstance();
+        secRelTeam.addTeamMember(macFarlane);
+        secRelTeam.addTeamMember(cheek);
+        relSec.setAuthorship(secRelTeam);
+
+        tags = formatter.getTaggedText(taxonRel, reverse, languages);
+        str = TaggedCacheHelper.createString(tags);
+        System.out.println(str);
+        Assert.assertTrue(str.contains("rel. sec. Macfarlane & Cheek 1919"));
+
+        //add third member
+        secRelTeam.addTeamMember(toSecAuthor);
+        tags = formatter.getTaggedText(taxonRel, reverse, languages);
+        str = TaggedCacheHelper.createString(tags);
+        System.out.println(str);
+        Assert.assertTrue(str.contains("rel. sec. Macfarlane, Cheek & ToSecAuthor 1919"));
+
+        //add et al.
+        secRelTeam.setHasMoreMembers(true);
+        tags = formatter.getTaggedText(taxonRel, reverse, languages);
+        str = TaggedCacheHelper.createString(tags);
+        System.out.println(str);
+        Assert.assertTrue(str.contains("rel. sec. Macfarlane, Cheek, ToSecAuthor & al. 1919"));
+
+    }
+
+    @Test
+    public void testGetTaggedTextMisappliedNameWithoutName() {
+
+        reverse = true;
+        String inverseSymbol = TaxonRelationshipType.MISAPPLIED_NAME_FOR().getInverseSymbol();
+        String symbol = TaxonRelationshipType.MISAPPLIED_NAME_FOR().getSymbol();
+
+        List<TaggedText> tags = formatter.getTaggedText(taxonRel, reverse, languages, WITHOUT_NAME);
+        String str = TaggedCacheHelper.createString(tags);
+        Assert.assertEquals(inverseSymbol + " sensu Macfarlane 1918, err. sec. Cheek 1919: 123", str);
+
+        //reverse
+        tags = formatter.getTaggedText(taxonRel, !reverse, languages, WITHOUT_NAME);
+        str = TaggedCacheHelper.createString(tags);
+        Assert.assertEquals(symbol + " sec. ToSecAuthor 1928, rel. sec. Cheek 1919: 123", str);
+
+        //auctores
+        fromTaxon.setAppendedPhrase("auctores");
+        tags = formatter.getTaggedText(taxonRel, reverse, languages, WITHOUT_NAME);
+        str = TaggedCacheHelper.createString(tags);
+        Assert.assertEquals(inverseSymbol + " auctores sensu Macfarlane 1918, err. sec. Cheek 1919: 123", str);
+
+        fromTaxon.setSec(null);
+        fromTaxon.setAppendedPhrase("");
+        tags = formatter.getTaggedText(taxonRel, reverse, languages, WITHOUT_NAME);
+        str = TaggedCacheHelper.createString(tags);
+        Assert.assertEquals(inverseSymbol + " auct., err. sec. Cheek 1919: 123", str);
+
+        fromTaxon.setDoubtful(true);
+        tags = formatter.getTaggedText(taxonRel, reverse, languages, WITHOUT_NAME);
+        str = TaggedCacheHelper.createString(tags);
+        System.out.println(str);
+        Assert.assertEquals(inverseSymbol + " ?\u202F auct., err. sec. Cheek 1919: 123", str);
+
+    }
+
+    @Test
+    public void testGetTaggedTextConceptRelationsWithoutName() {
+
+        reverse = false;
+
+        TaxonRelationshipType relType = TaxonRelationshipType.INCLUDES();
+
+        final String SYMBOL = relType.getSymbol();
+
+        taxonRel.setType(relType);
+        List<TaggedText> tags = formatter.getTaggedText(taxonRel, reverse, languages, WITHOUT_NAME);
+        String str = TaggedCacheHelper.createString(tags);
+        Assert.assertEquals(SYMBOL + " sec. ToSecAuthor 1928, rel. sec. Cheek 1919: 123", str);
+
+        tags = formatter.getTaggedText(taxonRel, !reverse, languages, WITHOUT_NAME);
+        str = TaggedCacheHelper.createString(tags);
+        Assert.assertEquals(relType.getInverseSymbol() + " sec. Macfarlane 1918, rel. sec. Cheek 1919: 123", str);
+
+        toTaxon.setAppendedPhrase("sensu stricto");
+        tags = formatter.getTaggedText(taxonRel, reverse, languages, WITHOUT_NAME);
+        str = TaggedCacheHelper.createString(tags);
+        Assert.assertEquals(SYMBOL + " sensu stricto sec. ToSecAuthor 1928, rel. sec. Cheek 1919: 123", str);
+
+        toTaxon.setSec(null);
+        toTaxon.setAppendedPhrase("");
+        tags = formatter.getTaggedText(taxonRel, reverse, languages, WITHOUT_NAME);
+        str = TaggedCacheHelper.createString(tags);
+        Assert.assertEquals(SYMBOL + " sec. ???, rel. sec. Cheek 1919: 123", str);
+
+        taxonRel.setDoubtful(true);
+        toTaxon.setAppendedPhrase("");
+        tags = formatter.getTaggedText(taxonRel, reverse, languages, WITHOUT_NAME);
+        str = TaggedCacheHelper.createString(tags);
+        Assert.assertEquals("?" + SYMBOL + " sec. ???, rel. sec. Cheek 1919: 123", str);
+
+    }
+
 }
index c03a4d2381c765b892bbd1bb04cba80736e2a886..0c37e2ba8b8dfe8493eed84732fa44dcef30460f 100644 (file)
@@ -305,6 +305,8 @@ public class TimePeriodTest {
                Assert.assertEquals("Year should be 1999", "1999", tp.getYear());\r
                tp.setEndYear(2002);\r
                Assert.assertEquals("Year should be 1999-2002", "1999-2002", tp.getYear());\r
+               tp.setContinued(true);\r
+               Assert.assertEquals("Year should be 1999+", "1999+", tp.getYear());\r
        }\r
 \r
 \r
@@ -320,8 +322,49 @@ public class TimePeriodTest {
                Assert.assertEquals("3.xx.1788-1799", tp1.toString());\r
                tp1.setEndMonth(11);\r
                Assert.assertEquals("3.xx.1788-11.1799", tp1.toString());\r
+               tp1.setContinued(true);\r
+               Assert.assertEquals("3.xx.1788+", tp1.toString());\r
+\r
+               tp1 = TimePeriod.NewInstance(1788,1799);\r
+               tp1.setContinued(true);\r
+        Assert.assertEquals("1788+", tp1.toString());\r
+        tp1 = TimePeriod.NewInstance((Integer)null);\r
+        tp1.setContinued(true);\r
+        //this is still undefined, could be something like 'xxxx+' in future\r
+        Assert.assertEquals("+", tp1.toString());\r
        }\r
 \r
+       @Test\r
+       public void testContinued() {\r
+           TimePeriod tp1 = TimePeriod.NewInstance(2017, 2018);\r
+           Assert.assertEquals((Integer)2018, tp1.getEndYear());\r
+           tp1.setContinued(true);\r
+           Assert.assertNull("The end should be removed and also the CONTINUED constant should be returned for getEnd()", tp1.getEnd());\r
+           Assert.assertTrue(tp1.isContinued());\r
+        Assert.assertEquals(null, tp1.getEndYear());\r
+           Assert.assertEquals(null, tp1.getEndMonth());\r
+           Assert.assertEquals(null, tp1.getEndDay());\r
+\r
+           //set continued to false (will not recover old end value)\r
+           tp1.setContinued(false);\r
+           Assert.assertFalse(tp1.isContinued());\r
+        Assert.assertEquals(null, tp1.getEndYear());\r
+        Assert.assertEquals(null, tp1.getEndMonth());\r
+        Assert.assertEquals(null, tp1.getEndDay());\r
+\r
+        //replace continued by end\r
+        tp1 = TimePeriod.NewInstance(2017, 2018);\r
+        tp1.setContinued(true);\r
+        Assert.assertTrue(tp1.isContinued());\r
+        tp1.setEndMonth(month);\r
+           Assert.assertFalse(tp1.isContinued());\r
+        Assert.assertEquals(null, tp1.getEndYear());\r
+        Assert.assertEquals(month, tp1.getEndMonth());\r
+        Assert.assertEquals(null, tp1.getEndDay());\r
+\r
+       }\r
+\r
+\r
 \r
        /**\r
         * Test method for {@link eu.etaxonomy.cdm.model.common.TimePeriod#clone()}.\r
diff --git a/cdmlib-model/src/test/java/eu/etaxonomy/cdm/model/common/TreeIndexTest.java b/cdmlib-model/src/test/java/eu/etaxonomy/cdm/model/common/TreeIndexTest.java
new file mode 100644 (file)
index 0000000..db6e90a
--- /dev/null
@@ -0,0 +1,73 @@
+/**
+* Copyright (C) 2018 EDIT
+* European Distributed Institute of Taxonomy
+* http://www.e-taxonomy.eu
+*
+* The contents of this file are subject to the Mozilla Public License Version 1.1
+* See LICENSE.TXT at the top of this package for the full license terms.
+*/
+package eu.etaxonomy.cdm.model.common;
+
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * @author a.mueller
+ * @since 12.09.2018
+ *
+ */
+public class TreeIndexTest {
+
+    TreeIndex indexRoot;
+    TreeIndex indexTree;
+
+    @Before
+    public void setUp() throws Exception {
+        indexRoot = TreeIndex.NewInstance("#t1#222#");
+        //maybe not allowed in future, see comment on TreeIndex.regEx
+        indexTree = TreeIndex.NewInstance("#t1#");
+    }
+
+
+    @Test
+    public void testParse() {
+        try {
+            indexTree = TreeIndex.NewInstance("#t1#11");
+            Assert.fail("Index string must end with #");
+        } catch (Exception e) {}
+        try {
+            indexTree = TreeIndex.NewInstance("t1#11#");
+            Assert.fail("Index string must start with #");
+        } catch (Exception e) {}
+
+        try {
+            indexTree = TreeIndex.NewInstance("#1#11#");
+            Assert.fail("Index must start with tree identifier which starts with a single character a-z");
+        } catch (Exception e) {}
+
+        try {
+            indexTree = TreeIndex.NewInstance("#tt1#11#");
+            Assert.fail("Tree identifier must have only 1 character a-z");
+        } catch (Exception e) {}
+
+        try {
+            indexTree = TreeIndex.NewInstance("#t1#t11#");
+            Assert.fail("Node identifier must have no character a-z");
+        } catch (Exception e) {}
+
+    }
+
+    @Test
+    public void testIsTreeRoot() {
+        Assert.assertTrue("Index should be tree root", indexRoot.isTreeRoot());
+        Assert.assertFalse("Index should not be tree", indexRoot.isTree());
+    }
+
+    @Test
+    public void testIsTree() {
+        Assert.assertFalse("Index should not be tree root", indexTree.isTreeRoot());
+        Assert.assertTrue("Index should be tree", indexTree.isTree());
+    }
+
+}
index 7c1f796b0ce275515fbd0af60d372f831a38741d..40030a77ef3141586c200ecbd1d987b2bd355cc6 100644 (file)
@@ -17,11 +17,8 @@ import static org.junit.Assert.assertTrue;
 import org.apache.log4j.Logger;
 import org.joda.time.DateTimeFieldType;
 import org.joda.time.Partial;
-import org.junit.After;
-import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.Before;
-import org.junit.BeforeClass;
 import org.junit.Test;
 
 import eu.etaxonomy.cdm.common.UTF8;
@@ -42,20 +39,6 @@ public class TimePeriodParserTest {
 //     private TimePeriod noStartAndEndYear;
 
 
-       /**
-        * @throws java.lang.Exception
-        */
-       @BeforeClass
-       public static void setUpBeforeClass() throws Exception {
-       }
-
-       /**
-        * @throws java.lang.Exception
-        */
-       @AfterClass
-       public static void tearDownAfterClass() throws Exception {
-       }
-
        /**
         * @throws java.lang.Exception
         */
@@ -69,13 +52,6 @@ public class TimePeriodParserTest {
 //             noStartAndEndYear = TimePeriod.NewInstance(start, end);
        }
 
-       /**
-        * @throws java.lang.Exception
-        */
-       @After
-       public void tearDown() throws Exception {
-       }
-
 
 //************************ TESTS ******************************************
 
@@ -203,8 +179,13 @@ public class TimePeriodParserTest {
                Assert.assertEquals(Integer.valueOf(1), tp.getEndMonth());
                Assert.assertEquals(Integer.valueOf(2), tp.getEndDay());
 
+       }
+
+       @Test
+       public void testSlashPattern() {
+
         String strSlashDate = "31/12/2015 - 2/1/2016";
-        tp = TimePeriodParser.parseString(strSlashDate);
+        TimePeriod tp = TimePeriodParser.parseString(strSlashDate);
         assertNotNull(tp);
         Assert.assertEquals("31.12.2015-2.1.2016", tp.toString());
         Assert.assertEquals("2015-2016", tp.getYear());
@@ -326,6 +307,46 @@ public class TimePeriodParserTest {
         Assert.assertEquals(null, tp.getVerbatimDate());
     }
 
+    @Test
+    public void testParseContinued() {
+        String strDate = "01.12.1957+";
+        TimePeriod tp = TimePeriodParser.parseString(strDate);
+        Assert.assertTrue(tp.isContinued());
+        Assert.assertEquals("1.12.1957+", tp.toString());
+        Assert.assertEquals(Integer.valueOf(1957), tp.getStartYear());
+        Assert.assertEquals(Integer.valueOf(12), tp.getStartMonth());
+        Assert.assertEquals(Integer.valueOf(1), tp.getStartDay());
+        Assert.assertNull(tp.getEnd());
+
+        strDate = "1957+";
+        tp = TimePeriodParser.parseString(strDate);
+        Assert.assertTrue(tp.isContinued());
+        Assert.assertEquals("1957+", tp.toString());
+        Assert.assertEquals(Integer.valueOf(1957), tp.getStartYear());
+        Assert.assertNull(tp.getStartMonth());
+        Assert.assertNull(tp.getStartDay());
+        Assert.assertNull(tp.getEnd());
+
+        strDate = "24 Aug. 1957+";
+        tp = TimePeriodParser.parseString(strDate);
+        Assert.assertEquals("24.8.1957+", tp.toString());
+        Assert.assertTrue(tp.isContinued());
+        Assert.assertEquals("1957+", tp.getYear());
+        Assert.assertEquals(Integer.valueOf(1957), tp.getStartYear());
+        Assert.assertEquals(Integer.valueOf(8), tp.getStartMonth());
+        Assert.assertEquals(Integer.valueOf(24), tp.getStartDay());
+
+        String strSlashDate = "31/12/2015+";
+        tp = TimePeriodParser.parseString(strSlashDate);
+        Assert.assertEquals("31.12.2015+", tp.toString());
+        Assert.assertTrue(tp.isContinued());
+        Assert.assertEquals("2015+", tp.getYear());
+        Assert.assertEquals(Integer.valueOf(2015), tp.getStartYear());
+        Assert.assertEquals(Integer.valueOf(12), tp.getStartMonth());
+        Assert.assertEquals(Integer.valueOf(31), tp.getStartDay());
+        Assert.assertNull(tp.getEnd());
+
+    }
 
 
 }
index e2f63a515ae7c64532d6bbf2db6bb5611e2854cf..f4762aca18f0d8dacde41831df84f7e57e658556 100644 (file)
@@ -3,7 +3,7 @@
   <parent>
     <groupId>eu.etaxonomy</groupId>
     <artifactId>cdmlib-parent</artifactId>
-    <version>5.2.0</version>
+    <version>5.3.0</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 
index a2699d72f297c3806017d03cff856d33e4708dda..a96584e840f8f65d9451e17e26f4c1f887eefe8a 100644 (file)
@@ -173,6 +173,7 @@ public abstract class IoResultBase implements Serializable{
      */
     public StringBuffer createReport() {
         StringBuffer report = new StringBuffer("");
+        addShortDescription(report);
         addErrorReport(report, "Errors", errors);
         addErrorReport(report, "Exceptions", exceptions);
         addErrorReport(report, "Warnings", warnings);
@@ -180,6 +181,16 @@ public abstract class IoResultBase implements Serializable{
     }
 
 
+
+
+
+    /**
+     * @param report
+     */
+    protected void addShortDescription(StringBuffer report) {
+        //do nothing
+
+    }
     /**
      * @param report
      * @param label
index 536acbfb6b5188a022c3f0b650ee9a5842d7575f..56b1fee65e0c9e4fb54ae1a485482b53f7461540 100644 (file)
@@ -466,14 +466,14 @@ public interface ICdmEntityDao<T extends CdmBase> {
      */
     public List<T> list(T example, Set<String> includeProperties, Integer limit, Integer start, List<OrderHint> orderHints, List<String> propertyPaths);
 
-    List<T> findByParamWithRestrictions(Class<? extends T> clazz, String param, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize,
+    public  <S extends T> List<S> findByParamWithRestrictions(Class<S> clazz, String param, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize,
             Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
 
-    long countByParamWithRestrictions(Class<? extends T> clazz, String param, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions);
+    public long countByParamWithRestrictions(Class<? extends T> clazz, String param, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions);
 
-    long countByParam(Class<? extends T> clazz, String param, String queryString, MatchMode matchmode, List<Criterion> criterion);
+    public long countByParam(Class<? extends T> clazz, String param, String queryString, MatchMode matchmode, List<Criterion> criterion);
 
-    List<T> findByParam(Class<? extends T> clazz, String param, String queryString, MatchMode matchmode, List<Criterion> criterion, Integer pageSize, Integer pageNumber,
+    public <S extends T> List<S> findByParam(Class<S> clazz, String param, String queryString, MatchMode matchmode, List<Criterion> criterion, Integer pageSize, Integer pageNumber,
             List<OrderHint> orderHints, List<String> propertyPaths);
 
 }
index 930bbb6356f527c3890a7733308144415832a530..0a83112fe3ba0a27d2e2996cceba53287be9737e 100644 (file)
@@ -111,7 +111,7 @@ public interface IIdentifiableDao <T extends IdentifiableEntity> extends IAnnota
     *            authorTeam.persistentTitleCache
     * @return a List of instances of type T matching the queryString
     */
-   public List<T> findByTitle(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
+   public <S extends T> List<S> findByTitle(Class<S> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
 
 
         /**
@@ -130,7 +130,7 @@ public interface IIdentifiableDao <T extends IdentifiableEntity> extends IAnnota
         *            authorTeam.persistentTitleCache
         * @return a List of instances of type T matching the queryString
         */
-       public List<T> findByTitleWithRestrictions(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
+       public <S extends T> List<S> findByTitleWithRestrictions(Class<S> clazz, String queryString,MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
 
 
 
@@ -144,7 +144,7 @@ public interface IIdentifiableDao <T extends IdentifiableEntity> extends IAnnota
         * @param matchMode
         * @return
         */
-       public List<T> findTitleCache(Class<? extends T> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, MatchMode matchMode);
+       public <S extends T> List<S> findTitleCache(Class<S> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, MatchMode matchMode);
     /**
     * Return a List of objects matching the given query string, optionally filtered by class, optionally with a particular MatchMode
     *
@@ -161,7 +161,7 @@ public interface IIdentifiableDao <T extends IdentifiableEntity> extends IAnnota
     *            authorTeam.persistentTitleCache
     * @return a List of instances of type T matching the queryString
     */
-   public List<T> findByReferenceTitle(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
+   public <S extends T> List<S> findByReferenceTitle(Class<S> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
 
         /**
         * Return a List of objects matching the given query string, optionally filtered by class, optionally with a particular MatchMode
@@ -179,7 +179,7 @@ public interface IIdentifiableDao <T extends IdentifiableEntity> extends IAnnota
         *            authorTeam.persistentTitleCache
         * @return a List of instances of type T matching the queryString
         */
-       public List<T> findByReferenceTitleWithRestrictions(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
+       public <S extends T> List<S> findByReferenceTitleWithRestrictions(Class<S> clazz, String queryString,MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
 
        /**
      * Return a count of objects matching the given query string in the titleCache, optionally filtered by class, optionally with a particular MatchMode
index 90a111f5867eb793b4de755fc784807f1a3ba582..c468d1fed002b3188f405a7143c02694130ca47c 100644 (file)
@@ -42,6 +42,7 @@ public interface ISearchableDao<T extends CdmBase> {
         * @see <a href="http://lucene.apache.org/java/2_4_0/queryparsersyntax.html">Apache Lucene - Query Parser Syntax</a>
         */
        public List<T> search(Class<? extends T> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
+//     public <S extends T> List<S> search(Class<S> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
 
        /**
         * Suggest a query that will return hits based upon an existing lucene query string (that is presumably misspelt and returns no hits)
index 0b323f18b505e947d5eff3cca2fbd44be599b4c9..c517fcdb09a491c01ead54c1a0770fbab9c37284 100644 (file)
@@ -1032,7 +1032,7 @@ public abstract class CdmEntityDaoBase<T extends CdmBase> extends DaoBase implem
      * @return\r
      */\r
     @Override\r
-    public List<T> findByParam(Class<? extends T> clazz, String param, String queryString, MatchMode matchmode,\r
+    public <S extends T> List<S> findByParam(Class<S> clazz, String param, String queryString, MatchMode matchmode,\r
             List<Criterion> criterion, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints,\r
             List<String> propertyPaths) {\r
 \r
@@ -1066,7 +1066,7 @@ public abstract class CdmEntityDaoBase<T extends CdmBase> extends DaoBase implem
         addOrder(criteria, orderHints);\r
 \r
         @SuppressWarnings("unchecked")\r
-        List<T> result = criteria.list();\r
+        List<S> result = criteria.list();\r
         defaultBeanInitializer.initializeAll(result, propertyPaths);\r
         return result;\r
     }\r
@@ -1150,7 +1150,7 @@ public abstract class CdmEntityDaoBase<T extends CdmBase> extends DaoBase implem
 \r
 \r
     @Override\r
-    public List<T> findByParamWithRestrictions(Class<? extends T> clazz, String param, String queryString,\r
+    public <S extends T> List<S> findByParamWithRestrictions(Class<S> clazz, String param, String queryString,\r
             MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber,\r
             List<OrderHint> orderHints, List<String> propertyPaths) {\r
 \r
@@ -1173,7 +1173,7 @@ public abstract class CdmEntityDaoBase<T extends CdmBase> extends DaoBase implem
         addOrder(criteria, orderHints);\r
 \r
         @SuppressWarnings("unchecked")\r
-        List<T> result = criteria.list();\r
+        List<S> result = criteria.list();\r
         defaultBeanInitializer.initializeAll(result, propertyPaths);\r
         return result;\r
 \r
index 94b7c946a91cc12365fdefd263828b31191d25e7..cf4a6f0194f0f9f404a732c3c1b3563aba068d00 100644 (file)
@@ -88,21 +88,28 @@ public class IdentifiableDaoBase<T extends IdentifiableEntity>
         return results;
     }
 
+    /**
+     * FIXME candidate for removal
+     * @deprecated use {@link #findTitleCache(Class, String, Integer, Integer, List, MatchMode)} instead (or other methods)
+     */
     @Override
+    @Deprecated
     public List<T> findByTitleAndClass(String queryString, Class<T> clazz) {
         checkNotInPriorView("IdentifiableDaoBase.findByTitleAndClass(String queryString, Class<T> clazz)");
         Criteria crit = getSession().createCriteria(clazz);
         crit.add(Restrictions.ilike("titleCache", queryString));
+        @SuppressWarnings("unchecked")
         List<T> results = crit.list();
         return results;
     }
 
     @Override
-    public List<T> findTitleCache(Class<? extends T> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, MatchMode matchMode){
+    public <S extends T> List<S> findTitleCache(Class<S> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, MatchMode matchMode){
 
         Query query = prepareFindTitleCache(clazz, queryString, pageSize,
                 pageNumber, matchMode, false);
-        List<T> result = query.list();
+        @SuppressWarnings("unchecked")
+        List<S> result = query.list();
         return result;
     }
 
@@ -149,22 +156,22 @@ public class IdentifiableDaoBase<T extends IdentifiableEntity>
     }
 
     @Override
-    public List<T> findByTitle(Class<? extends T> clazz, String queryString, MatchMode matchmode, List<Criterion> criterion, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+    public <S extends T> List<S> findByTitle(Class<S> clazz, String queryString, MatchMode matchmode, List<Criterion> criterion, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
         return findByParam(clazz, "titleCache", queryString, matchmode, criterion, pageSize, pageNumber, orderHints, propertyPaths);
     }
 
     @Override
-    public List<T> findByReferenceTitle(Class<? extends T> clazz, String queryString, MatchMode matchmode, List<Criterion> criterion, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+    public <S extends T> List<S> findByReferenceTitle(Class<S> clazz, String queryString, MatchMode matchmode, List<Criterion> criterion, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
         return findByParam(clazz, "title", queryString, matchmode, criterion, pageSize, pageNumber, orderHints, propertyPaths);
     }
 
     @Override
-    public List<T> findByTitleWithRestrictions(Class<? extends T> clazz, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+    public <S extends T> List<S> findByTitleWithRestrictions(Class<S> clazz, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
         return findByParamWithRestrictions(clazz, "titleCache", queryString, matchmode, restrictions, pageSize, pageNumber, orderHints, propertyPaths);
     }
 
     @Override
-    public List<T> findByReferenceTitleWithRestrictions(Class<? extends T> clazz, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+    public <S extends T> List<S> findByReferenceTitleWithRestrictions(Class<S> clazz, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
         return findByParamWithRestrictions(clazz, "title", queryString, matchmode, restrictions, pageSize, pageNumber, orderHints, propertyPaths);
     }
 
index 1404a933b3fc813b55b6f1455277e1dee646c507..1a30c1af84370b175f3dcc8c348ef70e6fbcc50a 100644 (file)
@@ -75,13 +75,13 @@ public abstract class VersionableDaoBase<T extends VersionableEntity> extends Cd
        }
 
     @Override
-    public List<T> findByParam(Class<? extends T> clazz, String param, String queryString, MatchMode matchmode, List<Criterion> criterion, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+    public <S extends T> List<S> findByParam(Class<S> clazz, String param, String queryString, MatchMode matchmode, List<Criterion> criterion, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
        checkNotInPriorView("IdentifiableDaoBase.findByParam(Class<? extends T> clazz, String queryString, MatchMode matchmode, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths)");
        return super.findByParam(clazz, param, queryString, matchmode, criterion, pageSize, pageNumber, orderHints, propertyPaths);
     }
 
     @Override
-    public List<T> findByParamWithRestrictions(Class<? extends T> clazz, String param, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+    public <S extends T> List<S> findByParamWithRestrictions(Class<S> clazz, String param, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
             checkNotInPriorView("IdentifiableDaoBase.findByParam(Class<? extends T> clazz, String queryString, MatchMode matchmode, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths)");
         return super.findByParamWithRestrictions(clazz, param, queryString, matchmode, restrictions, pageSize, pageNumber, orderHints, propertyPaths);
     }
index c114867650a57b5c1b31ebb9ad2dbe36d5271222..2c1eab4da07023c4e846e167ae1102dea07a2299 100644 (file)
@@ -119,6 +119,7 @@ public class DescriptionElementDaoImpl extends AnnotatableDaoImpl<DescriptionEle
 
     @Override
     public List<DescriptionElementBase> search(Class<? extends DescriptionElementBase> clazz, String queryString, Integer pageSize,    Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+//    public <S extends DescriptionElementBase> List<S> search(Class<S> clazz, String queryString, Integer pageSize,  Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
         checkNotInPriorView("DescriptionElementDaoImpl.searchTextData(String queryString, Integer pageSize,    Integer pageNumber)");
         QueryParser queryParser = new QueryParser(defaultField, new StandardAnalyzer());
 
index 181665fb6744211cb76b8b8d90cdebde5e741a6b..ba30cd1e62b7ed477380d7c647f177e4bccc7317 100644 (file)
@@ -57,11 +57,11 @@ public class ClassificationDaoHibernateImpl
 \r
     @Override\r
     @SuppressWarnings("unchecked")\r
-    public List<TaxonNode> listRankSpecificRootNodes(Classification classification, Rank rank,\r
+    public List<TaxonNode> listRankSpecificRootNodes(Classification classification, TaxonNode taxonNode, Rank rank,\r
             boolean includeUnpublished, Integer limit, Integer start, List<String> propertyPaths, int queryIndex){\r
 \r
         List<TaxonNode> results = new ArrayList<>();\r
-        Query[] queries = prepareRankSpecificRootNodes(classification, rank, includeUnpublished, false);\r
+        Query[] queries = prepareRankSpecificRootNodes(classification, taxonNode, rank, includeUnpublished, false);\r
 \r
         // since this method is using two queries sequentially the handling of limit and start\r
         // is a bit more complex\r
@@ -86,10 +86,10 @@ public class ClassificationDaoHibernateImpl
     }\r
 \r
     @Override\r
-    public long[] countRankSpecificRootNodes(Classification classification, boolean includeUnpublished, Rank rank) {\r
+    public long[] countRankSpecificRootNodes(Classification classification, TaxonNode subtree, boolean includeUnpublished, Rank rank) {\r
 \r
         long[] result = new long[(rank == null ? 1 : 2)];\r
-        Query[] queries = prepareRankSpecificRootNodes(classification, rank, includeUnpublished, true);\r
+        Query[] queries = prepareRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, true);\r
         int i = 0;\r
         for(Query q : queries) {\r
             result[i++] = (Long)q.uniqueResult();\r
@@ -107,43 +107,57 @@ public class ClassificationDaoHibernateImpl
      *      one or two Queries as array, depending on the <code>rank</code> parameter:\r
      *      <code>rank == null</code>: array with one item, <code>rank != null</code>: array with two items.\r
      */\r
-    private Query[] prepareRankSpecificRootNodes(Classification classification, Rank rank,\r
+    private Query[] prepareRankSpecificRootNodes(Classification classification,\r
+            TaxonNode subtree, Rank rank,\r
             boolean includeUnpublished, boolean doCount) {\r
         Query query1;\r
         Query query2 = null;\r
 \r
         String whereClassification = classification != null? " AND tn.classification = :classification " : "";\r
         String whereUnpublished = includeUnpublished? "" : " AND tn.taxon.publish = :publish ";\r
+        String whereSubtree = subtree != null ? " AND tn.treeIndex like :treeIndexLike " : "";\r
+        TreeIndex treeIndex = TreeIndex.NewInstance(subtree);\r
+        String whereHighest =\r
+                treeIndex == null ? " tn.parent.parent = null ":\r
+                treeIndex.isTreeRoot() ? " tn.parent.treeIndex = :treeIndex ":\r
+                            " tn.treeIndex = :treeIndex "   ;\r
 \r
         String selectWhat = doCount ? "COUNT(distinct tn)" : "DISTINCT tn";\r
 \r
         String joinFetch = doCount ? "" : " JOIN FETCH tn.taxon t JOIN FETCH t.name n LEFT JOIN FETCH n.rank LEFT JOIN FETCH t.sec ";\r
 \r
         if(rank == null){\r
-            String hql = "SELECT " + selectWhat + " FROM TaxonNode tn" +\r
-                    joinFetch +\r
-                    " WHERE tn.parent.parent = null " +\r
-                    whereClassification +  whereUnpublished;\r
+            String hql = "SELECT " + selectWhat +\r
+                    " FROM TaxonNode tn" +\r
+                        joinFetch +\r
+                    " WHERE " + whereHighest +\r
+                    whereClassification + whereUnpublished;\r
             query1 = getSession().createQuery(hql);\r
         } else {\r
             // this is for the cases\r
             //   - exact match of the ranks\r
-            //   - rank of root node is lower but is has no parents\r
-            String hql1 = "SELECT " + selectWhat + " FROM TaxonNode tn " +\r
-                    joinFetch +\r
+            //   - rank of root node is lower but it has no parents\r
+            String hql1 = "SELECT " + selectWhat +\r
+                    " FROM TaxonNode tn " +\r
+                       joinFetch +\r
                     " WHERE " +\r
                     " (tn.taxon.name.rank = :rank" +\r
-                    "   OR (tn.taxon.name.rank.orderIndex > :rankOrderIndex AND tn.parent.parent = null)" +\r
+                    "   OR ((tn.taxon.name.rank.orderIndex > :rankOrderIndex) AND (" + whereHighest + "))" +\r
                     " )"\r
-                    + whereClassification + whereUnpublished ;\r
+                    + whereClassification + whereSubtree + whereUnpublished ;\r
 \r
             // this is for the case\r
             //   - rank of root node is lower and it has a parent with higher rank\r
-            String hql2 = "SELECT " + selectWhat + " FROM TaxonNode tn JOIN tn.parent as parent" +\r
-                    joinFetch +\r
+            String whereParentSubtree = subtree != null ? " AND parent.treeIndex like :treeIndexLike " : "";\r
+            String hql2 = "SELECT " + selectWhat +\r
+                    " FROM TaxonNode tn JOIN tn.parent as parent" +\r
+                       joinFetch +\r
                     " WHERE " +\r
-                    " (tn.taxon.name.rank.orderIndex > :rankOrderIndex AND parent.taxon.name.rank.orderIndex < :rankOrderIndex )"\r
-                    + whereClassification + whereUnpublished;\r
+                    " (tn.taxon.name.rank.orderIndex > :rankOrderIndex "\r
+                    + "     AND parent.taxon.name.rank.orderIndex < :rankOrderIndex )"\r
+                    + whereClassification + whereSubtree\r
+                    + whereParentSubtree + whereUnpublished;\r
+\r
             query1 = getSession().createQuery(hql1);\r
             query2 = getSession().createQuery(hql2);\r
             query1.setParameter("rank", rank);\r
@@ -158,6 +172,15 @@ public class ClassificationDaoHibernateImpl
                 query2.setParameter("classification", classification);\r
             }\r
         }\r
+        if (subtree != null){\r
+            query1.setParameter("treeIndex", subtree.treeIndex());\r
+            if (rank != null){\r
+                query1.setParameter("treeIndexLike", subtree.treeIndex()+"%");\r
+            }\r
+            if(query2 != null) {\r
+                query2.setParameter("treeIndexLike", subtree.treeIndex()+"%");\r
+            }\r
+        }\r
         if (!includeUnpublished){\r
             query1.setBoolean("publish", true);\r
             if(query2 != null) {\r
@@ -173,9 +196,9 @@ public class ClassificationDaoHibernateImpl
     }\r
 \r
     @Override\r
-    public List<TaxonNode> listChildrenOf(Taxon taxon, Classification classification, boolean includeUnpublished,\r
+    public List<TaxonNode> listChildrenOf(Taxon taxon, Classification classification, TaxonNode subtree, boolean includeUnpublished,\r
             Integer pageSize, Integer pageIndex, List<String> propertyPaths){\r
-        Query query = prepareListChildrenOf(taxon, classification, false, includeUnpublished);\r
+        Query query = prepareListChildrenOf(taxon, classification, subtree, false, includeUnpublished);\r
 \r
          setPagingParameter(query, pageSize, pageIndex);\r
 \r
@@ -227,9 +250,9 @@ public class ClassificationDaoHibernateImpl
 \r
 \r
     @Override\r
-    public Long countChildrenOf(Taxon taxon, Classification classification,\r
+    public Long countChildrenOf(Taxon taxon, Classification classification, TaxonNode subtree,\r
             boolean includeUnpublished){\r
-        Query query = prepareListChildrenOf(taxon, classification, true, includeUnpublished);\r
+        Query query = prepareListChildrenOf(taxon, classification, subtree, true, includeUnpublished);\r
         Long count = (Long) query.uniqueResult();\r
         return count;\r
     }\r
@@ -241,7 +264,7 @@ public class ClassificationDaoHibernateImpl
         return count;\r
     }\r
 \r
-    private Query prepareListChildrenOf(Taxon taxon, Classification classification,\r
+    private Query prepareListChildrenOf(Taxon taxon, Classification classification, TaxonNode subtree,\r
             boolean doCount, boolean includeUnpublished){\r
 \r
         String selectWhat = doCount ? "COUNT(cn)" : "cn";\r
@@ -256,12 +279,18 @@ public class ClassificationDaoHibernateImpl
          if (!includeUnpublished){\r
              hql += "  AND cn.taxon.publish = :publish ";\r
          }\r
+         if (subtree != null){\r
+             hql += "  AND tn.treeIndex like :treeIndexLike ";\r
+         }\r
          Query query = getSession().createQuery(hql);\r
          query.setParameter("taxon", taxon);\r
          query.setParameter("classification", classification);\r
          if (!includeUnpublished){\r
              query.setBoolean("publish", Boolean.TRUE);\r
          }\r
+         if (subtree != null){\r
+             query.setParameter("treeIndexLike", subtree.treeIndexLike());\r
+         }\r
          return query;\r
     }\r
 \r
index 1a663a0174df9bceb868f09dafde8e6c80577c1b..0a8b104d741ce0f789873b5134b0c9ed47a11857 100755 (executable)
@@ -151,10 +151,11 @@ public class TaxonDaoHibernateImpl
         return result;
     }
 
+    //TODO needed? Currently only used by tests.
     public List<TaxonBase> getTaxaByName(boolean doTaxa, boolean doSynonyms, boolean includeUnpublished,
             String queryString, MatchMode matchMode, Integer pageSize, Integer pageNumber) {
         return getTaxaByName(doTaxa, doSynonyms, false, false, false,
-                queryString, null, matchMode, null, includeUnpublished, null, pageSize, pageNumber, null);
+                queryString, null, null, matchMode, null, includeUnpublished, null, pageSize, pageNumber, null);
     }
 
     @Override
@@ -175,14 +176,14 @@ public class TaxonDaoHibernateImpl
     @Override
     public List<TaxonBase> getTaxaByName(boolean doTaxa, boolean doSynonyms, boolean doMisappliedNames, boolean doCommonNames,
             boolean includeAuthors,
-            String queryString, Classification classification,
+            String queryString, Classification classification, TaxonNode subtree,
             MatchMode matchMode, Set<NamedArea> namedAreas, boolean includeUnpublished, NameSearchOrder order,
             Integer pageSize, Integer pageNumber, List<String> propertyPaths) {
 
         boolean doCount = false;
 
         String searchField = includeAuthors ? "titleCache" : "nameCache";
-        Query query = prepareTaxaByName(doTaxa, doSynonyms, doMisappliedNames, doCommonNames, includeUnpublished, searchField, queryString, classification, matchMode, namedAreas, order, pageSize, pageNumber, doCount);
+        Query query = prepareTaxaByName(doTaxa, doSynonyms, doMisappliedNames, doCommonNames, includeUnpublished, searchField, queryString, classification, subtree, matchMode, namedAreas, order, pageSize, pageNumber, doCount);
 
         if (query != null){
             @SuppressWarnings({ "unchecked", "rawtypes" })
@@ -195,16 +196,13 @@ public class TaxonDaoHibernateImpl
         }else{
             return new ArrayList<>();
         }
-
-
     }
 
-
     //new search for the editor, for performance issues the return values are only uuid and titleCache, to avoid the initialisation of all objects
     @Override
     @SuppressWarnings("unchecked")
     public List<UuidAndTitleCache<? extends IdentifiableEntity>> getTaxaByNameForEditor(boolean doTaxa, boolean doSynonyms, boolean doNamesWithoutTaxa,
-            boolean doMisappliedNames, boolean doCommonNames, boolean includeUnpublished, String queryString, Classification classification,
+            boolean doMisappliedNames, boolean doCommonNames, boolean includeUnpublished, String queryString, Classification classification, TaxonNode subtree,
             MatchMode matchMode, Set<NamedArea> namedAreas, NameSearchOrder order) {
 
         if (order == null){
@@ -230,7 +228,7 @@ public class TaxonDaoHibernateImpl
                }
         }
         Query query = prepareTaxaByNameForEditor(doTaxa, doSynonyms, doMisappliedNames, doCommonNames, includeUnpublished,
-                "nameCache", queryString, classification, matchMode, namedAreas, doCount, order);
+                "nameCache", queryString, classification, subtree, matchMode, namedAreas, doCount, order);
 
         if (query != null){
             List<Object[]> results = query.list();
@@ -291,11 +289,11 @@ public class TaxonDaoHibernateImpl
      *
      */
     private Query prepareTaxaByNameForEditor(boolean doTaxa, boolean doSynonyms, boolean doMisappliedNames, boolean doCommonNames,
-            boolean includeUnpublished, String searchField, String queryString, Classification classification,
+            boolean includeUnpublished, String searchField, String queryString, Classification classification, TaxonNode subtree,
             MatchMode matchMode, Set<NamedArea> namedAreas, boolean doCount, NameSearchOrder order) {
         return prepareByNameQuery(doTaxa, doSynonyms, doMisappliedNames, doCommonNames, includeUnpublished,
                 searchField, queryString,
-                classification, matchMode, namedAreas, order, doCount, true);
+                classification, subtree, matchMode, namedAreas, order, doCount, true);
     }
 
 
@@ -320,7 +318,7 @@ public class TaxonDaoHibernateImpl
      */
     private Query prepareByNameQuery(boolean doTaxa, boolean doSynonyms, boolean doMisappliedNames,
                 boolean doCommonNames, boolean includeUnpublished, String searchField, String queryString,
-                Classification classification, MatchMode matchMode, Set<NamedArea> namedAreas,
+                Classification classification, TaxonNode subtree, MatchMode matchMode, Set<NamedArea> namedAreas,
                 NameSearchOrder order, boolean doCount, boolean returnIdAndTitle){
 
             boolean doProParteSynonyms = doSynonyms;  //we may distinguish in future
@@ -356,7 +354,7 @@ public class TaxonDaoHibernateImpl
             }
 
             Subselects subSelects = createByNameHQLString(doConceptRelations,
-                    includeUnpublished, classification, areasExpanded, matchMode, searchField);
+                    includeUnpublished, classification, subtree, areasExpanded, matchMode, searchField);
             String taxonSubselect = subSelects.taxonSubselect;
             String synonymSubselect = subSelects.synonymSubselect;
             String conceptSelect = subSelects.conceptSelect;
@@ -375,7 +373,7 @@ public class TaxonDaoHibernateImpl
                 // find Taxa
                 Query subTaxon = getSearchQueryString(hqlQueryString, taxonSubselect);
 
-                addRestrictions(doAreaRestriction, classification, includeUnpublished,
+                addRestrictions(doAreaRestriction, classification, subtree, includeUnpublished,
                         namedAreasUuids, subTaxon);
                 taxonIDs = subTaxon.list();
             }
@@ -383,7 +381,7 @@ public class TaxonDaoHibernateImpl
             if(doSynonyms){
                 // find synonyms
                 Query subSynonym = getSearchQueryString(hqlQueryString, synonymSubselect);
-                addRestrictions(doAreaRestriction, classification, includeUnpublished, namedAreasUuids,subSynonym);
+                addRestrictions(doAreaRestriction, classification, subtree, includeUnpublished, namedAreasUuids,subSynonym);
                 synonymIDs = subSynonym.list();
             }
             if (doConceptRelations ){
@@ -396,14 +394,14 @@ public class TaxonDaoHibernateImpl
                     relTypeSet.addAll(TaxonRelationshipType.allSynonymTypes());
                 }
                 subMisappliedNames.setParameterList("rTypeSet", relTypeSet);
-                addRestrictions(doAreaRestriction, classification, includeUnpublished, namedAreasUuids, subMisappliedNames);
+                addRestrictions(doAreaRestriction, classification, subtree, includeUnpublished, namedAreasUuids, subMisappliedNames);
                 taxonIDs.addAll(subMisappliedNames.list());
             }
 
             if(doCommonNames){
                 // find Taxa
                 Query subCommonNames = getSearchQueryString(hqlQueryString, commonNameSubSelect);
-                addRestrictions(doAreaRestriction, classification, includeUnpublished, namedAreasUuids, subCommonNames);
+                addRestrictions(doAreaRestriction, classification, subtree, includeUnpublished, namedAreasUuids, subCommonNames);
                 taxonIDs.addAll(subCommonNames.list());
             }
 
@@ -513,10 +511,11 @@ public class TaxonDaoHibernateImpl
      * @param includeUnpublished
      * @param classification
      * @param doAreaRestriction
+     * @param subtree
      * @param namedAreasUuids
      * @param subTaxon
      */
-    protected void addRestrictions(boolean doAreaRestriction, Classification classification, boolean includeUnpublished,
+    protected void addRestrictions(boolean doAreaRestriction, Classification classification, TaxonNode subtree, boolean includeUnpublished,
             Set<UUID> namedAreasUuids, Query query) {
         if(doAreaRestriction){
             query.setParameterList("namedAreasUuids", namedAreasUuids);
@@ -524,6 +523,9 @@ public class TaxonDaoHibernateImpl
         if(classification != null){
             query.setParameter("classification", classification);
         }
+        if(subtree != null){
+            query.setParameter("treeIndexLike", subtree.treeIndex() + "%");
+        }
         if(!includeUnpublished){
             query.setBoolean("publish", true);
         }
@@ -546,10 +548,10 @@ public class TaxonDaoHibernateImpl
      */
     private Query prepareTaxaByName(boolean doTaxa, boolean doSynonyms, boolean doMisappliedNames,
             boolean doCommonNames, boolean includeUnpublished, String searchField, String queryString,
-            Classification classification, MatchMode matchMode, Set<NamedArea> namedAreas, NameSearchOrder order, Integer pageSize, Integer pageNumber, boolean doCount) {
+            Classification classification, TaxonNode subtree, MatchMode matchMode, Set<NamedArea> namedAreas, NameSearchOrder order, Integer pageSize, Integer pageNumber, boolean doCount) {
 
         Query query = prepareByNameQuery(doTaxa, doSynonyms, doMisappliedNames, doCommonNames, includeUnpublished,
-                searchField, queryString, classification, matchMode, namedAreas, order, doCount, false);
+                searchField, queryString, classification, subtree, matchMode, namedAreas, order, doCount, false);
 
         if(pageSize != null && !doCount && query != null) {
             query.setMaxResults(pageSize);
@@ -592,7 +594,7 @@ public class TaxonDaoHibernateImpl
 
     @Override
     public long countTaxaByName(boolean doTaxa, boolean doSynonyms, boolean doMisappliedNames, boolean doCommonNames,
-            boolean doIncludeAuthors, String queryString, Classification classification,
+            boolean doIncludeAuthors, String queryString, Classification classification, TaxonNode subtree,
         MatchMode matchMode, Set<NamedArea> namedAreas, boolean includeUnpublished) {
 
         boolean doCount = true;
@@ -608,7 +610,7 @@ public class TaxonDaoHibernateImpl
         String searchField = doIncludeAuthors ? "titleCache": "nameCache";
 
         Query query = prepareTaxaByName(doTaxa, doSynonyms, doMisappliedNames, doCommonNames, includeUnpublished,
-                searchField, queryString, classification, matchMode, namedAreas, null, null, null, doCount);
+                searchField, queryString, classification, subtree, matchMode, namedAreas, null, null, null, doCount);
         if (query != null) {
             return (Long)query.uniqueResult();
         }else{
@@ -622,11 +624,11 @@ public class TaxonDaoHibernateImpl
      * @param areaQuery
      */
     private void expandNamedAreas(Collection<NamedArea> namedAreas, Set<NamedArea> areasExpanded, Query areaQuery) {
-        List<NamedArea> childAreas;
         for(NamedArea a : namedAreas){
             areasExpanded.add(a);
             areaQuery.setParameter("area", a);
-            childAreas = areaQuery.list();
+            @SuppressWarnings("unchecked")
+            List<NamedArea> childAreas = areaQuery.list();
             if(childAreas.size() > 0){
                 areasExpanded.addAll(childAreas);
                 expandNamedAreas(childAreas, areasExpanded, areaQuery);
@@ -665,10 +667,10 @@ public class TaxonDaoHibernateImpl
     }
 
     @Override
-    public List<TaxonBase> findByNameTitleCache(boolean doTaxa, boolean doSynonyms, boolean includeUnpublished, String queryString, Classification classification, MatchMode matchMode, Set<NamedArea> namedAreas, NameSearchOrder order, Integer pageNumber, Integer pageSize, List<String> propertyPaths) {
+    public List<TaxonBase> findByNameTitleCache(boolean doTaxa, boolean doSynonyms, boolean includeUnpublished, String queryString, Classification classification, TaxonNode subtree, MatchMode matchMode, Set<NamedArea> namedAreas, NameSearchOrder order, Integer pageNumber, Integer pageSize, List<String> propertyPaths) {
 
         boolean doCount = false;
-        Query query = prepareTaxaByName(doTaxa, doSynonyms, false, false, includeUnpublished, "titleCache", queryString, classification, matchMode, namedAreas, order, pageSize, pageNumber, doCount);
+        Query query = prepareTaxaByName(doTaxa, doSynonyms, false, false, includeUnpublished, "titleCache", queryString, classification, subtree, matchMode, namedAreas, order, pageSize, pageNumber, doCount);
         if (query != null){
             @SuppressWarnings({ "unchecked", "rawtypes" })
             List<TaxonBase> results = query.list();
@@ -1006,9 +1008,12 @@ public class TaxonDaoHibernateImpl
     private String prepareTaxonRelationshipQuery(Set<TaxonRelationshipType> types, boolean includeUnpublished,
             Direction direction, boolean isCount) {
         String selectStr = isCount? " count(rel) as n ":" rel ";
-        String result = "SELECT " + selectStr +
-             " FROM TaxonRelationship rel " +
-             " WHERE rel."+direction+" = :relatedTaxon";
+        String result = "SELECT " + selectStr + " FROM TaxonRelationship rel ";
+        if(direction != null){
+            result += " WHERE rel."+direction+" = :relatedTaxon";
+        } else {
+            result += " WHERE (rel.relatedFrom = :relatedTaxon OR rel.relatedTo = :relatedTaxon )";
+        }
         if (types != null){
             result += " AND rel.type IN (:types) ";
         }
@@ -1441,16 +1446,19 @@ public class TaxonDaoHibernateImpl
     }
 
     private Subselects createByNameHQLString(boolean doConceptRelations,
-                boolean includeUnpublished, Classification classification,  Set<NamedArea> areasExpanded,
-                MatchMode matchMode, String searchField){
+                boolean includeUnpublished, Classification classification, TaxonNode subtree,
+                Set<NamedArea> areasExpanded, MatchMode matchMode, String searchField){
+
 
         boolean doAreaRestriction = areasExpanded.size() > 0;
+        boolean hasTaxonNodeFilter = classification != null || subtree != null;
+
         String doAreaRestrictionSubSelect =
                      " SELECT %s.id "
                    + " FROM Distribution e "
                    + "    JOIN e.inDescription d "
                    + "    JOIN d.taxon t " +
-                (classification != null ? " JOIN t.taxonNodes AS tn " : " ");
+                (hasTaxonNodeFilter ? " JOIN t.taxonNodes AS tn " : " ");
 
         String doAreaRestrictionConceptRelationSubSelect =
                    "SELECT %s.id "
@@ -1460,7 +1468,7 @@ public class TaxonDaoHibernateImpl
 
         String doTaxonSubSelect =
                      " SELECT %s.id "
-                   + " FROM Taxon t " + (classification != null ? " "
+                   + " FROM Taxon t " + (hasTaxonNodeFilter ? " "
                            + " JOIN t.taxonNodes AS tn " : " ");
 
         String doTaxonMisappliedNameSubSelect =
@@ -1476,7 +1484,7 @@ public class TaxonDaoHibernateImpl
         String doConceptRelationJoin =
                    " LEFT JOIN t.relationsFromThisTaxon AS rft " +
                    " LEFT JOIN rft.relatedTo AS rt " +
-                      (classification != null ? " LEFT JOIN rt.taxonNodes AS tn2 " : " ") +
+                      (hasTaxonNodeFilter ? " LEFT JOIN rt.taxonNodes AS tn2 " : " ") +
                    " LEFT JOIN rt.name AS n2" +
                    " LEFT JOIN rft.type as rtype";
 
@@ -1486,8 +1494,11 @@ public class TaxonDaoHibernateImpl
                    " LEFT JOIN com.feature f ";
 
 
-        String doClassificationWhere = " tn.classification = :classification";
-        String doClassificationForConceptRelationsWhere = " tn2.classification = :classification";
+        String doTreeWhere = classification == null ? "" : " AND tn.classification = :classification";
+        String doTreeForConceptRelationsWhere = classification == null ? "": " AND tn2.classification = :classification";
+
+        String doSubtreeWhere = subtree == null? "":" AND tn.treeIndex like :treeIndexLike";
+        String doSubtreeForConceptRelationsWhere = subtree == null? "":" AND tn2.treeIndex like :treeIndexLike";
 
         String doAreaRestrictionWhere =  " e.area.uuid in (:namedAreasUuids)";
         String doCommonNamesRestrictionWhere = " (f.supportsCommonTaxonName = true and com.name "+matchMode.getMatchOperator()+" :queryString )";
@@ -1501,31 +1512,31 @@ public class TaxonDaoHibernateImpl
         String conceptSelect = null;
         String commonNameSubselect = null;
 
-        if(classification != null ){
+        if(hasTaxonNodeFilter){
             if (!doConceptRelations){
                 if(doAreaRestriction){
                     taxonSubselect = String.format(doAreaRestrictionSubSelect, "t") + doTaxonNameJoin +
-                            " WHERE " + doAreaRestrictionWhere +
-                            "  AND " + doClassificationWhere +
+                            " WHERE (1=1) AND " + doAreaRestrictionWhere +
+                                doTreeWhere + doSubtreeWhere +
                             "  AND " + String.format(doSearchFieldWhere, "n");
                     synonymSubselect = String.format(doAreaRestrictionSubSelect, "s") + doSynonymNameJoin +
-                            " WHERE " + doAreaRestrictionWhere +
-                            "  AND " + doClassificationWhere +
+                            " WHERE (1=1) AND " + doAreaRestrictionWhere +
+                                doTreeWhere + doSubtreeWhere +
                             "  AND " + String.format(doSearchFieldWhere, "sn");
                     commonNameSubselect =  String.format(doAreaRestrictionSubSelect, "t") + doCommonNamesJoin +
-                            " WHERE " +  doAreaRestrictionWhere +
-                            "  AND " + doClassificationWhere +
+                            " WHERE (1=1) AND " +  doAreaRestrictionWhere +
+                                 doTreeWhere + doSubtreeWhere +
                             "  AND " + String.format(doSearchFieldWhere, "n") +
                             "  AND " + doCommonNamesRestrictionWhere;
                 } else {//no area restriction
                     taxonSubselect = String.format(doTaxonSubSelect, "t" )+ doTaxonNameJoin +
-                            " WHERE " + doClassificationWhere +
+                            " WHERE (1=1) " + doTreeWhere + doSubtreeWhere +
                             "  AND " + String.format(doSearchFieldWhere, "n");
                     synonymSubselect = String.format(doTaxonSubSelect, "s" ) + doSynonymNameJoin +
-                            " WHERE " + doClassificationWhere +
+                            " WHERE (1=1) " + doTreeWhere + doSubtreeWhere +
                             "  AND " + String.format(doSearchFieldWhere, "sn");
                     commonNameSubselect =String.format(doTaxonSubSelect, "t" )+ doCommonNamesJoin +
-                            " WHERE " + doClassificationWhere +
+                            " WHERE (1=1) " + doTreeWhere + doSubtreeWhere +
                             "  AND " + doCommonNamesRestrictionWhere;
                 }
             }else{ //concept relations included
@@ -1533,37 +1544,37 @@ public class TaxonDaoHibernateImpl
                     conceptSelect = String.format(doAreaRestrictionConceptRelationSubSelect, "t") + doTaxonNameJoin + doConceptRelationJoin  +
                             " WHERE " + doAreaRestrictionWhere +
                             "  AND " + String.format(doSearchFieldWhere, "n") +
-                            "  AND " + doClassificationForConceptRelationsWhere +
+                                 doTreeForConceptRelationsWhere + doSubtreeForConceptRelationsWhere +
                             "  AND " + doRelationshipTypeComparison;
                     taxonSubselect = String.format(doAreaRestrictionSubSelect, "t") + doTaxonNameJoin +
                             " WHERE " + doAreaRestrictionWhere +
                             "  AND " + String.format(doSearchFieldWhere, "n") +
-                            "  AND " + doClassificationWhere;
+                                doTreeWhere + doSubtreeWhere;
                     synonymSubselect = String.format(doAreaRestrictionSubSelect, "s") + doSynonymNameJoin +
                             " WHERE " + doAreaRestrictionWhere +
-                            "  AND " + doClassificationWhere +
+                                doTreeWhere + doSubtreeWhere +
                             "  AND " + String.format(doSearchFieldWhere, "sn");
                     commonNameSubselect= String.format(doAreaRestrictionSubSelect, "t")+ doCommonNamesJoin +
                             " WHERE " + doAreaRestrictionWhere +
-                            "  AND " + doClassificationWhere +
+                                doTreeWhere + doSubtreeWhere +
                             "  AND " + doCommonNamesRestrictionWhere;
                 } else {//no area restriction
                     conceptSelect = String.format(doTaxonMisappliedNameSubSelect, "t" ) + doTaxonNameJoin + doConceptRelationJoin +
                             " WHERE " + String.format(doSearchFieldWhere, "n") +
-                            "  AND " + doClassificationForConceptRelationsWhere +
+                                  doTreeForConceptRelationsWhere + doSubtreeForConceptRelationsWhere +
                             "  AND " + doRelationshipTypeComparison;
                     taxonSubselect = String.format(doTaxonSubSelect, "t" ) + doTaxonNameJoin +
                             " WHERE " +  String.format(doSearchFieldWhere, "n") +
-                            " AND "+ doClassificationWhere;
+                                 doTreeWhere + doSubtreeWhere;
                     synonymSubselect = String.format(doTaxonSubSelect, "s" ) + doSynonymNameJoin +
-                            " WHERE " + doClassificationWhere +
+                            " WHERE (1=1) " + doTreeWhere + doSubtreeWhere +
                             "  AND " +  String.format(doSearchFieldWhere, "sn");
                     commonNameSubselect= String.format(doTaxonSubSelect, "t")+ doCommonNamesJoin +
-                            " WHERE " + doClassificationWhere +
+                            " WHERE (1=1) " + doTreeWhere + doSubtreeWhere +
                             "  AND " + doCommonNamesRestrictionWhere;
                 }
             }
-        } else { //classification = null
+        } else { //classification = null && subtree = null
             if(doAreaRestriction){
                 conceptSelect = String.format(doAreaRestrictionConceptRelationSubSelect, "t") + doTaxonNameJoin + doConceptRelationJoin +
                         " WHERE " + doAreaRestrictionWhere +
index 6acfc87091de533b99a90413a9a5fd40bfbe0a28..ae24c5315c8fb9fee8be72599bfd7e13201e7bf4 100755 (executable)
@@ -275,7 +275,7 @@ public class TaxonNodeDaoHibernateImpl extends AnnotatableDaoImpl<TaxonNode>
                defaultBeanInitializer.initializeAll(results, propertyPaths);\r
                return results;\r
        }else{\r
-               return classificationDao.listChildrenOf(node.getTaxon(), node.getClassification(),\r
+               return classificationDao.listChildrenOf(node.getTaxon(), node.getClassification(), null,\r
                       includeUnpublished, pageSize, pageIndex, propertyPaths);\r
        }\r
 \r
@@ -291,7 +291,7 @@ public class TaxonNodeDaoHibernateImpl extends AnnotatableDaoImpl<TaxonNode>
                return ((Integer)crit.uniqueResult().hashCode()).longValue();\r
                }else{\r
                        return classificationDao.countChildrenOf(\r
-                               node.getTaxon(), classification, includeUnpublished);\r
+                               node.getTaxon(), classification, null, includeUnpublished);\r
                }\r
        }\r
     /**\r
index a84685376a36d51fc8fe3ee00d76813b60aaa2ae..8654616e46757cf87e10b2fce8ae2fa946e1559f 100644 (file)
@@ -61,15 +61,15 @@ public interface IClassificationDao extends IIdentifiableDao<Classification> {
      *            <code>rank != null</code>.\r
      * @return\r
      */\r
-    public List<TaxonNode> listRankSpecificRootNodes(Classification classification, Rank rank,\r
+    public List<TaxonNode> listRankSpecificRootNodes(Classification classification, TaxonNode subtree, Rank rank,\r
             boolean includeUnpublished, Integer limit, Integer start, List<String> propertyPaths, int queryIndex);\r
 \r
-    public long[] countRankSpecificRootNodes(Classification classification, boolean includeUnpublished, Rank rank);\r
+    public long[] countRankSpecificRootNodes(Classification classification, TaxonNode subtree, boolean includeUnpublished, Rank rank);\r
 \r
-    public List<TaxonNode> listChildrenOf(Taxon taxon, Classification classification, boolean includeUnpublished,\r
+    public List<TaxonNode> listChildrenOf(Taxon taxon, Classification classification, TaxonNode subtree, boolean includeUnpublished,\r
             Integer pageSize, Integer pageIndex, List<String> propertyPaths);\r
 \r
-    public Long countChildrenOf(Taxon taxon, Classification classification, boolean includeUnpublished);\r
+    public Long countChildrenOf(Taxon taxon, Classification classification, TaxonNode subtree, boolean includeUnpublished);\r
 \r
     public TaxonNode getRootNode(UUID classificationUuid);\r
 \r
index 6d43e155457480d2f5040e7c3db3d157769d1584..4a9c67b17e52311d50b23124ad7e1df2d94b1779 100644 (file)
@@ -96,6 +96,7 @@ public interface ITaxonDao
      * @param doSynonyms
      * @param queryString
      * @param classification TODO
+     * @param subtree
      * @param matchMode
      * @param namedAreas TODO
      * @param pageSize
@@ -104,7 +105,7 @@ public interface ITaxonDao
      * @return list of found taxa
      */
     public List<TaxonBase> getTaxaByName(boolean doTaxa, boolean doSynonyms, boolean doMisappliedNames, boolean doCommonNames,
-            boolean includeAuthors, String queryString, Classification classification,
+            boolean includeAuthors, String queryString, Classification classification, TaxonNode subtree,
             MatchMode matchMode, Set<NamedArea> namedAreas, boolean includeUnpublished,
             NameSearchOrder order, Integer pageSize, Integer pageNumber, List<String> propertyPaths);
 
@@ -113,6 +114,7 @@ public interface ITaxonDao
      * @param doSynonyms
      * @param queryString
      * @param classification TODO
+     * @param subtree
      * @param matchMode
      * @param namedAreas
      * @param pageSize
@@ -121,27 +123,9 @@ public interface ITaxonDao
      * @return
      */
     public long countTaxaByName(boolean doTaxa, boolean doSynonyms, boolean doMisappliedNames, boolean doCommonNames,
-            boolean doIncludeAuthors, String queryString, Classification classification,
+            boolean doIncludeAuthors, String queryString, Classification classification, TaxonNode subtree,
             MatchMode matchMode, Set<NamedArea> namedAreas, boolean includeUnpublished);
 
-//     /**
-//      * @param queryString
-//      * @param matchMode
-//      * @param accepted
-//      * @return
-//      */
-//     public Integer countTaxaByName(String queryString, MatchMode matchMode,
-//                     Boolean accepted);
-
-//     /**
-//      * Returns a count of TaxonBase instances where the
-//      * taxon.name properties match the parameters passed.
-//      *
-//      * @param queryString search string
-//      * @param matchMode way how search string shall be matched: exact, beginning, or anywhere
-//      * @param selectModel all taxon base, taxa, or synonyms
-//      */
-//     public Integer countTaxaByName(String queryString, MatchMode matchMode, SelectMode selectMode);
 
     /**
      * Returns a count of TaxonBase instances where the
@@ -201,7 +185,7 @@ public interface ITaxonDao
      * @return
      */
     public List<TaxonBase> findByNameTitleCache(boolean doTaxa, boolean doSynonyms, boolean includeUnpublished,
-            String queryString, Classification classification, MatchMode matchMode, Set<NamedArea> namedAreas,
+            String queryString, Classification classification, TaxonNode subtree, MatchMode matchMode, Set<NamedArea> namedAreas,
             NameSearchOrder order, Integer pageNumber, Integer pageSize, List<String> propertyPaths) ;
 
     /**
@@ -368,7 +352,7 @@ public interface ITaxonDao
 
     public List<UuidAndTitleCache<? extends IdentifiableEntity>> getTaxaByNameForEditor(boolean doTaxa, boolean doSynonyms, boolean doNamesWithoutTaxa,
             boolean doMisappliedNames, boolean doCommonNames, boolean includeUnpublished,
-            String queryString, Classification classification,
+            String queryString, Classification classification, TaxonNode subtree,
             MatchMode matchMode, Set<NamedArea> namedAreas, NameSearchOrder order);
 
     public List<String> taxaByNameNotInDB(List<String> taxonNames);
index f2ae1f9f4d6ac173f7880ec1614b9ae8a1eb2636..926c9ace72f292fd0f4c25bc2d447f23d5756018 100644 (file)
@@ -16,7 +16,6 @@ import eu.etaxonomy.cdm.model.common.ICdmBase;
 /**
  * @author cmathew
  * @since 7 Oct 2015
- *
  */
 public class MergeResult<T extends ICdmBase> implements Serializable {
 
index 05653164bf5fea6d9a98db55c77416f8a475912b..88272c5a7cbc73a515d9c9f60148496443fa5bc4 100644 (file)
@@ -60,11 +60,24 @@ public class TargetEntityStates {
         return previousState != null;
     }
 
+    /**
+     * Compares the current state of the entity property (state being persisted) with the previous state
+     * (state to be overwritten in the storage) and returns <code>true</code> in case there is a previous
+     * state and the new state is different.
+     *
+     * @param propertyName
+     * @return
+     */
     public boolean propertyChanged(String propertyName){
         if(propertyNames == null){
             // usually during a save or delete operation
             return false;
         }
+        if(!hasPreviousState()){
+            // should be covered by propertyNames == null but this check seems to be nececary in rare situations
+            // see the NPE stack strace in #7702 for an example
+            return false;
+        }
         int i = 0;
         for(String p : propertyNames){
             if(p.equals(propertyName)){
index 4c4caf71a681878c028443ff7b301d43c82145e5..755fbe5c6d9945c0984a1b435536cba981c073fd 100644 (file)
@@ -53,10 +53,10 @@ public class RegistrationVoter extends CdmPermissionVoter {
 
             RegistrationStatus status;
             if(targetEntityStates.propertyChanged("status")){
-                status = targetEntityStates.previousPropertyState("status", RegistrationStatus.class); 
+                status = targetEntityStates.previousPropertyState("status", RegistrationStatus.class);
             } else {
                 status = ((Registration)targetEntityStates.getEntity()).getStatus();
-                
+
             }
             vr.isPropertyMatch = cdmAuthority.getProperty().contains(status.name());
             logger.debug("property is matching");
index db15aa28e1bd5b4f8ad44dda2a8b88a33e49fc5d..0c8f3d80faadbbb57c768edb7336a97fbfa37677 100644 (file)
@@ -31,6 +31,7 @@ import eu.etaxonomy.cdm.model.taxon.TaxonNode;
 import eu.etaxonomy.cdm.persistence.dao.reference.IReferenceDao;
 import eu.etaxonomy.cdm.persistence.dao.taxon.IClassificationDao;
 import eu.etaxonomy.cdm.persistence.dao.taxon.ITaxonDao;
+import eu.etaxonomy.cdm.persistence.dao.taxon.ITaxonNodeDao;
 import eu.etaxonomy.cdm.persistence.dto.ClassificationLookupDTO;
 import eu.etaxonomy.cdm.test.integration.CdmTransactionalIntegrationTest;
 import eu.etaxonomy.cdm.test.unitils.CleanSweepInsertLoadStrategy;
@@ -48,14 +49,17 @@ public class ClassificationDaoHibernateImplTest extends CdmTransactionalIntegrat
     private IClassificationDao classificationDao;
     @SpringBeanByType
     private IReferenceDao referenceDao;
+    @SpringBeanByType
+    private ITaxonNodeDao taxonNodeDao;
 
     private boolean includeUnpublished;
 
-    private static final String CLASSIFICATION_UUID = "2a5ceebb-4830-4524-b330-78461bf8cb6b";
-    private static final String CLASSIFICATION_FULL_UUID = "a71467a6-74dc-4148-9530-484628a5ab0e";
+    private static final UUID FLAT_CLASSIFICATION_UUID = UUID.fromString("2a5ceebb-4830-4524-b330-78461bf8cb6b");
+    private static final UUID CLASSIFICATION_FULL_UUID = UUID.fromString("a71467a6-74dc-4148-9530-484628a5ab0e");
     private static final UUID UUID_ABIES = UUID.fromString("19f560d9-a555-4883-9c54-39d04872307c");
     private static final UUID UUID_PINACEAE = UUID.fromString("74216ed8-5f04-439e-87e0-500738f5e7fc");
-
+    private static final UUID UUID_ABIES_NODE = UUID.fromString("56b10cf0-9522-407e-9f90-0c2dba263c94");
+    private static final UUID UUID_FLAT_ROOT = UUID.fromString("75202d4e-b2aa-4343-8b78-340a52d15c40");
 
     @Before
     public void setUp() {
@@ -72,21 +76,21 @@ public class ClassificationDaoHibernateImplTest extends CdmTransactionalIntegrat
 
         checkPreconditions();
 
-        Classification classification = classificationDao.load(UUID.fromString(CLASSIFICATION_UUID));
+        Classification classification = classificationDao.load(FLAT_CLASSIFICATION_UUID);
 
         includeUnpublished = true;
         // test for the bug in http://dev.e-taxonomy.eu/trac/ticket/2778
         Rank rank = Rank.GENUS();
         // run both queries in dao method since rank != null
-        List<TaxonNode> rootNodes = classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished,
+        List<TaxonNode> rootNodes = classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished,
                 null, null, null, 0);
-        rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished,
+        rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished,
                 null, null, null, 1));
         assertEquals(3, rootNodes.size());
 
         rank = null;
         // run only fist query in dao method since rank == null
-        rootNodes = classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished,
+        rootNodes = classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished,
                 null, null, null, 0);
         assertEquals("The absolut root nodes should be returned", 3, rootNodes.size());
 
@@ -95,14 +99,14 @@ public class ClassificationDaoHibernateImplTest extends CdmTransactionalIntegrat
 
         rank = Rank.GENUS();
         // run both queries in dao method since rank != null
-        rootNodes = classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished,
+        rootNodes = classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished,
                 null, null, null, 0);
-        rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished,
+        rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished,
                 null, null, null, 1));
         assertEquals(2, rootNodes.size());  //5002 in unpublished
 
         rank = null;
-        rootNodes = classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished,
+        rootNodes = classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished,
                 null, null, null, 0);
         assertEquals("The absolut root nodes should be returned", 2, rootNodes.size());
     }
@@ -118,12 +122,12 @@ public class ClassificationDaoHibernateImplTest extends CdmTransactionalIntegrat
         // check preconditions
        checkPreconditions();
 
-        Classification classification = classificationDao.load(UUID.fromString(CLASSIFICATION_FULL_UUID));
+        Classification classification = classificationDao.load(CLASSIFICATION_FULL_UUID);
         includeUnpublished = false;
         Rank rank = Rank.GENUS();
         // run both queries in dao method since rank != null
-        List<TaxonNode> rootNodes = classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, null, 0);
-        rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, null, 1));
+        List<TaxonNode> rootNodes = classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 0);
+        rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 1));
         assertEquals("Only the genus should come back", 1, rootNodes.size());
         assertEquals(Rank.GENUS(), rootNodes.get(0).getTaxon().getName().getRank());
         assertEquals(UUID_ABIES, rootNodes.get(0).getTaxon().getUuid());
@@ -131,16 +135,16 @@ public class ClassificationDaoHibernateImplTest extends CdmTransactionalIntegrat
         rank = Rank.SUBGENUS();
         // run both queries in dao method since rank != null
         includeUnpublished = true;
-        rootNodes = classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, null, 0);
-        rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, null, 1));
+        rootNodes = classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 0);
+        rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 1));
         assertEquals("Only the 2 species should come back", 2, rootNodes.size());
         for (TaxonNode tn : rootNodes){
                assertEquals(Rank.SPECIES(), tn.getTaxon().getName().getRank());
         }
         // run both queries in dao method since rank != null
         includeUnpublished = false;
-        rootNodes = classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, null, 0);
-        rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, null, 1));
+        rootNodes = classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 0);
+        rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 1));
         assertEquals("Only the 1 published species should come back", 1, rootNodes.size());
         for (TaxonNode tn : rootNodes){
             assertEquals(Rank.SPECIES(), tn.getTaxon().getName().getRank());
@@ -148,29 +152,154 @@ public class ClassificationDaoHibernateImplTest extends CdmTransactionalIntegrat
 
         rank = Rank.SUBFAMILY();
         // run both queries in dao method since rank != null
-        rootNodes = classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, null, 0);
-        rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, null, 1));
+        rootNodes = classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 0);
+        rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 1));
         assertEquals("Only the genus should come back", 1, rootNodes.size());
         assertEquals(Rank.GENUS(), rootNodes.get(0).getTaxon().getName().getRank());
         assertEquals(UUID_ABIES, rootNodes.get(0).getTaxon().getUuid());
 
         rank = Rank.FAMILY();
         // run both queries in dao method since rank != null
-        rootNodes = classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, null, 0);
-        rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, null, 1));
+        rootNodes = classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 0);
+        rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 1));
         assertEquals("Only the family should come back", 1, rootNodes.size());
         assertEquals(Rank.FAMILY(), rootNodes.get(0).getTaxon().getName().getRank());
         assertEquals(UUID_PINACEAE, rootNodes.get(0).getTaxon().getUuid());
 
         rank = null;
         // run only first query in dao method since rank == null
-        rootNodes = classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, null, 0);
+        rootNodes = classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 0);
         assertEquals("Only the family as the absolut root node should come back", 1, rootNodes.size());
         assertEquals(Rank.FAMILY(), rootNodes.get(0).getTaxon().getName().getRank());
         assertEquals(UUID_PINACEAE, rootNodes.get(0).getTaxon().getUuid());
 
     }
 
+    /**
+     * Test listRankSpecificRootNode with an existing classification
+     */
+    @Test
+    @DataSet(value="ClassificationDaoHibernateImplTest.listRankSpecificRootNodes.xml")
+    public void testListRankSpecificRootNodesWithHierarchie_withSubtree() {
+
+        // check preconditions
+        checkPreconditions();
+
+        Classification classification = classificationDao.load(CLASSIFICATION_FULL_UUID);
+        TaxonNode subtree = taxonNodeDao.findByUuid(UUID_ABIES_NODE);
+
+        includeUnpublished = false;
+        Rank rank = null;
+        // run only first query as rank is null
+        List<TaxonNode> rootNodes = classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 0);
+        assertEquals("Only 1 node - the Abies node - should come back as root node for the subtree", 1, rootNodes.size());
+        assertEquals(Rank.GENUS(), rootNodes.get(0).getTaxon().getName().getRank());
+        assertEquals(UUID_ABIES, rootNodes.get(0).getTaxon().getUuid());
+
+        rank = Rank.GENUS();
+        // run both queries in dao method since rank != null
+        rootNodes = classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 0);
+        rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 1));
+        assertEquals("Only 1 node - the Abies node - should come back", 1, rootNodes.size());
+        assertEquals(Rank.GENUS(), rootNodes.get(0).getTaxon().getName().getRank());
+        assertEquals(UUID_ABIES, rootNodes.get(0).getTaxon().getUuid());
+
+
+        rank = Rank.SUBGENUS();
+        // run both queries in dao method since rank != null
+        includeUnpublished = true;
+        rootNodes = classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 0);
+        rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 1));
+        assertEquals("Only the 2 species should come back", 2, rootNodes.size());
+        for (TaxonNode tn : rootNodes){
+            assertEquals(Rank.SPECIES(), tn.getTaxon().getName().getRank());
+        }
+        // same with unpublished
+        includeUnpublished = false;
+        rootNodes = classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 0);
+        rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 1));
+        assertEquals("Only the 1 published species should come back", 1, rootNodes.size());
+        for (TaxonNode tn : rootNodes){
+            assertEquals(Rank.SPECIES(), tn.getTaxon().getName().getRank());
+        }
+
+        rank = Rank.SUBFAMILY();
+        // run both queries in dao method since rank != null
+        rootNodes = classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 0);
+        rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 1));
+        assertEquals("Only the genus should come back", 1, rootNodes.size());
+        assertEquals(Rank.GENUS(), rootNodes.get(0).getTaxon().getName().getRank());
+        assertEquals(UUID_ABIES, rootNodes.get(0).getTaxon().getUuid());
+
+        rank = Rank.FAMILY();
+        // run both queries in dao method since rank != null
+        rootNodes = classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 0);
+        rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 1));
+        assertEquals("Only the genus should come back as family is not in subtree", 1, rootNodes.size());
+        assertEquals(Rank.GENUS(), rootNodes.get(0).getTaxon().getName().getRank());
+        assertEquals(UUID_ABIES, rootNodes.get(0).getTaxon().getUuid());
+
+        //no classification filter
+        //should have no effect as subtree is kind of classification filter
+        TaxonNode rootNode = classification.getRootNode();
+        classification = null;
+        rank = null;
+        rootNodes = classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 0);
+        assertEquals("Only 1 node - the Abies node - should come back", 1, rootNodes.size());
+        assertEquals(UUID_ABIES, rootNodes.get(0).getTaxon().getUuid());
+
+        rank = Rank.GENUS();
+        rootNodes = classificationDao.listRankSpecificRootNodes(null, subtree, rank, includeUnpublished, null, null, null, 0);
+        rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 1));
+        assertEquals("Only 1 node - the Abies node - should come back", 1, rootNodes.size());
+        assertEquals(UUID_ABIES, rootNodes.get(0).getTaxon().getUuid());
+
+        rank = Rank.SUBGENUS();
+        includeUnpublished = true;
+        rootNodes = classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 0);
+        rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 1));
+        assertEquals("Only the 2 species should come back", 2, rootNodes.size());
+
+        //with root node
+        subtree = rootNode;
+        rank = null;
+        rootNodes = classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 0);
+        assertEquals("Only the family should come back", 1, rootNodes.size());
+        assertEquals(Rank.FAMILY(), rootNodes.get(0).getTaxon().getName().getRank());
+        assertEquals(UUID_PINACEAE, rootNodes.get(0).getTaxon().getUuid());
+
+        rank = Rank.GENUS();
+        rootNodes = classificationDao.listRankSpecificRootNodes(null, subtree, rank, includeUnpublished, null, null, null, 0);
+        rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 1));
+        assertEquals("Only 1 node - the Abies node - should come back", 1, rootNodes.size());
+        assertEquals(UUID_ABIES, rootNodes.get(0).getTaxon().getUuid());
+
+        rank = Rank.SUBGENUS();
+        includeUnpublished = true;
+        rootNodes = classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 0);
+        rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 1));
+        assertEquals("Only the 2 species should come back", 2, rootNodes.size());
+
+
+        //flat hierarchy
+        classification = classificationDao.load(FLAT_CLASSIFICATION_UUID);
+        includeUnpublished = false;
+
+        rank = null;
+        rootNodes = classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 0);
+        assertEquals("No subtree should be returned as subtree is not from classification", 0, rootNodes.size());
+
+        subtree = taxonNodeDao.findByUuid(UUID_FLAT_ROOT);
+        rootNodes = classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 0);
+//        assertEquals("The 2 published species should be returned", 2, rootNodes.size());
+
+        rank = Rank.GENUS();
+        rootNodes = classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 0);
+        rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 1));
+        assertEquals(2, rootNodes.size());  //5002 in unpublished
+
+    }
+
     /**
      * Test listRankSpecificRootNode with all classifications
      */
@@ -181,13 +310,13 @@ public class ClassificationDaoHibernateImplTest extends CdmTransactionalIntegrat
 
        Rank rank = Rank.GENUS();
         // run both queries in dao method since rank != null
-        List<TaxonNode> rootNodes = classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, null, 0);
-        rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, null, 1));
+        List<TaxonNode> rootNodes = classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 0);
+        rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 1));
         assertEquals("3 Species from no hierarchie and 1 genus from hierarchie should return", 4, rootNodes.size());
 
         rank = null;
         // run only first query in dao method since rank == null
-        rootNodes = classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, null, 0);
+        rootNodes = classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 0);
         assertEquals("4 taxa should return (3 species from no hierarchie, 1 family, from hierarchie classification", 4, rootNodes.size());
     }
 
@@ -205,7 +334,7 @@ public class ClassificationDaoHibernateImplTest extends CdmTransactionalIntegrat
     @DataSet(value="ClassificationDaoHibernateImplTest.listRankSpecificRootNodes.xml")
     public void testClassificationLookup() {
 
-        Classification classification = classificationDao.load(UUID.fromString(CLASSIFICATION_FULL_UUID));
+        Classification classification = classificationDao.load(CLASSIFICATION_FULL_UUID);
         ClassificationLookupDTO classificationLookupDto = classificationDao.classificationLookup(classification);
         assertEquals(4, classificationLookupDto.getTaxonIds().size());
     }
@@ -224,7 +353,7 @@ public class ClassificationDaoHibernateImplTest extends CdmTransactionalIntegrat
 
            // 1. create the entities   and save them
            Classification flatHierarchieClassification = Classification.NewInstance("European Abies");
-           flatHierarchieClassification.setUuid(UUID.fromString(CLASSIFICATION_UUID));
+           flatHierarchieClassification.setUuid(FLAT_CLASSIFICATION_UUID);
            classificationDao.save(flatHierarchieClassification);
 
            Reference sec = ReferenceFactory.newBook();
@@ -277,7 +406,7 @@ public class ClassificationDaoHibernateImplTest extends CdmTransactionalIntegrat
 
            // 1. create the entities   and save them
            Classification fullHierarchieClassification = Classification.NewInstance("European Abies full hierarchie");
-           fullHierarchieClassification.setUuid(UUID.fromString(CLASSIFICATION_FULL_UUID));
+           fullHierarchieClassification.setUuid(CLASSIFICATION_FULL_UUID);
            classificationDao.save(fullHierarchieClassification);
 
            fullHierarchieClassification.addParentChild(t_pinaceae, t_abies, null, null);
index 6dbf1dc4643bc5427f8c1d4431e7aeb5b6920f4a..f493f3db083c3e76af6009dd8b25a98316d43e86 100644 (file)
@@ -57,6 +57,7 @@ import eu.etaxonomy.cdm.persistence.dao.common.IDefinedTermDao;
 import eu.etaxonomy.cdm.persistence.dao.reference.IReferenceDao;
 import eu.etaxonomy.cdm.persistence.dao.taxon.IClassificationDao;
 import eu.etaxonomy.cdm.persistence.dao.taxon.ITaxonDao;
+import eu.etaxonomy.cdm.persistence.dao.taxon.ITaxonNodeDao;
 import eu.etaxonomy.cdm.persistence.dto.UuidAndTitleCache;
 import eu.etaxonomy.cdm.persistence.query.GroupByCount;
 import eu.etaxonomy.cdm.persistence.query.GroupByDate;
@@ -77,6 +78,9 @@ public class TaxonDaoHibernateImplTest extends CdmTransactionalIntegrationTest {
     @SpringBeanByType
     private ITaxonDao taxonDao;
 
+    @SpringBeanByType
+    private ITaxonNodeDao taxonNodeDao;
+
     @SpringBeanByType
     private IClassificationDao classificationDao;
 
@@ -86,16 +90,19 @@ public class TaxonDaoHibernateImplTest extends CdmTransactionalIntegrationTest {
     @SpringBeanByType
     private IDefinedTermDao definedTermDao;
 
-    private UUID uuid;
-    private UUID sphingidae;
-    private UUID acherontia;
-    private UUID rethera;
-    private UUID retheraSecCdmtest;
-    private UUID atroposAgassiz; // a Synonym
-    private UUID atroposOken;  // a Synonym
-    private UUID atroposLeach; // a Synonym
-    private UUID acherontiaLachesis;
-    private UUID aus;
+    private UUID uuid = UUID.fromString("496b1325-be50-4b0a-9aa2-3ecd610215f2");
+    private UUID sphingidae = UUID.fromString("54e767ee-894e-4540-a758-f906ecb4e2d9");
+    private UUID acherontia = UUID.fromString("c5cc8674-4242-49a4-aada-72d63194f5fa");
+    private UUID rethera = UUID.fromString("a9f42927-e507-4fda-9629-62073a908aae");
+    private UUID retheraSecCdmtest = UUID.fromString("a9f42927-e507-4fda-9629-62073a908aae");
+    private UUID atroposAgassiz = UUID.fromString("d75b2e3d-7394-4ada-b6a5-93175b8751c1"); // a Synonym
+    private UUID atroposOken = UUID.fromString("6bfedf25-6dbc-4d5c-9d56-84f9052f3b2a");  // a Synonym
+    private UUID atroposLeach = UUID.fromString("3da4ab34-6c50-4586-801e-732615899b07"); // a Synonym
+    private UUID acherontiaLachesis = UUID.fromString("b04cc9cb-2b4a-4cc4-a94a-3c93a2158b06");
+    private UUID aus = UUID.fromString("496b1325-be50-4b0a-9aa2-3ecd610215f2");
+
+    private UUID UUID_ACHERONTIA_NODE = UUID.fromString("56b10cf0-9522-407e-9f90-0c2dba263c94");
+    private UUID UUID_CLASSIFICATION2 = UUID.fromString("a71467a6-74dc-4148-9530-484628a5ab0e");
 
     private AuditEvent previousAuditEvent;
     private AuditEvent mostRecentAuditEvent;
@@ -128,16 +135,6 @@ public class TaxonDaoHibernateImplTest extends CdmTransactionalIntegrationTest {
     @Before
     public void setUp() {
 
-        uuid = UUID.fromString("496b1325-be50-4b0a-9aa2-3ecd610215f2");
-        sphingidae = UUID.fromString("54e767ee-894e-4540-a758-f906ecb4e2d9");
-        acherontia = UUID.fromString("c5cc8674-4242-49a4-aada-72d63194f5fa");
-        acherontiaLachesis = UUID.fromString("b04cc9cb-2b4a-4cc4-a94a-3c93a2158b06");
-        atroposAgassiz = UUID.fromString("d75b2e3d-7394-4ada-b6a5-93175b8751c1");
-        atroposOken = UUID.fromString("6bfedf25-6dbc-4d5c-9d56-84f9052f3b2a");
-        atroposLeach =  UUID.fromString("3da4ab34-6c50-4586-801e-732615899b07");
-        rethera = UUID.fromString("a9f42927-e507-4fda-9629-62073a908aae");
-        retheraSecCdmtest = UUID.fromString("a9f42927-e507-4fda-9629-62073a908aae");
-        aus = UUID.fromString("496b1325-be50-4b0a-9aa2-3ecd610215f2");
 
         previousAuditEvent = new AuditEvent();
         previousAuditEvent.setRevisionNumber(1025);
@@ -234,7 +231,7 @@ public class TaxonDaoHibernateImplTest extends CdmTransactionalIntegrationTest {
     public void testGetTaxaByNameWithMisappliedNames(){
 
         Classification classification = classificationDao.load(classificationUuid);
-
+        TaxonNode subtree = null;
         /* NOTE:
          * The testdata contains 3 misapplied names (1. nameCache = Aus, 2. nameCache = Rethera, 3. nameCache = Daphnis),
          * two contained in the classification used in this test,
@@ -243,49 +240,49 @@ public class TaxonDaoHibernateImplTest extends CdmTransactionalIntegrationTest {
          */
         //two accepted taxa starting with R in classification "TestBaum"
         @SuppressWarnings("rawtypes")
-        List<TaxonBase> results = taxonDao.getTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, "R*", classification, MatchMode.BEGINNING,
+        List<TaxonBase> results = taxonDao.getTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, "R*", classification, subtree, MatchMode.BEGINNING,
                 null, includeUnpublished, null, null, null, null);
         Assert.assertEquals("There should be 2 Taxa", 2, results.size());
 
         //three taxa, 2 accepted and 1 misapplied name starting with R
-        results = taxonDao.getTaxaByName(doTaxa, noSynonyms, doMisapplied, noCommonNames, false, "R*", null, MatchMode.BEGINNING,
+        results = taxonDao.getTaxaByName(doTaxa, noSynonyms, doMisapplied, noCommonNames, false, "R*", null, subtree, MatchMode.BEGINNING,
                 null, includeUnpublished, null, null, null, null);
         Assert.assertEquals("There should be 3 Taxa", 3, results.size());
 
         //one synonym has no accepted taxon
-        results = taxonDao.getTaxaByName(doTaxa, doSynonyms, doMisapplied, noCommonNames, false, "A*", null, MatchMode.BEGINNING,
+        results = taxonDao.getTaxaByName(doTaxa, doSynonyms, doMisapplied, noCommonNames, false, "A*", null, subtree, MatchMode.BEGINNING,
                 null, includeUnpublished, null, null, null, null);
         Assert.assertEquals("There should be 11 Taxa",11, results.size());
 
         //two accepted taxa in classification and 1 misapplied name with accepted name in classification
-        results = taxonDao.getTaxaByName(doTaxa, doSynonyms, doMisapplied, noCommonNames, false, "R*", classification, MatchMode.BEGINNING,
+        results = taxonDao.getTaxaByName(doTaxa, doSynonyms, doMisapplied, noCommonNames, false, "R*", classification, subtree, MatchMode.BEGINNING,
                 null, includeUnpublished, null, null, null, null);
         Assert.assertEquals("There should be 3 Taxa", 3, results.size());
         //same with unpublished
         includeUnpublished = false;
-        results = taxonDao.getTaxaByName(doTaxa, doSynonyms, doMisapplied, noCommonNames, false, "R*", classification, MatchMode.BEGINNING,
+        results = taxonDao.getTaxaByName(doTaxa, doSynonyms, doMisapplied, noCommonNames, false, "R*", classification, subtree, MatchMode.BEGINNING,
                 null, includeUnpublished, null, null, null, null);
         Assert.assertEquals("There should be 3 Taxa", 3, results.size());
         includeUnpublished = true;
 
 
         //same as above because all taxa, synonyms and misapplied names starting with R are in the classification
-        results = taxonDao.getTaxaByName(doTaxa, doSynonyms, doMisapplied, noCommonNames, false, "R*", null, MatchMode.BEGINNING,
+        results = taxonDao.getTaxaByName(doTaxa, doSynonyms, doMisapplied, noCommonNames, false, "R*", null, subtree, MatchMode.BEGINNING,
                 null, includeUnpublished, null, null, null, null);
         Assert.assertEquals("There should be 3 Taxa", 3, results.size());
 
         //find misapplied names with accepted taxon in the classification, the accepted taxa of two misapplied names are in the classification
-        results = taxonDao.getTaxaByName(noTaxa, noSynonyms, doMisapplied, noCommonNames, false, "*", classification, MatchMode.BEGINNING,
+        results = taxonDao.getTaxaByName(noTaxa, noSynonyms, doMisapplied, noCommonNames, false, "*", classification, subtree, MatchMode.BEGINNING,
                 null, includeUnpublished, null, null, null, null);
         Assert.assertEquals("There should be 2 Taxa", 2, results.size());
 
         //find misapplied names beginning with R
-        results = taxonDao.getTaxaByName(noTaxa, noSynonyms, doMisapplied, noCommonNames, false, "R*", null, MatchMode.BEGINNING,
+        results = taxonDao.getTaxaByName(noTaxa, noSynonyms, doMisapplied, noCommonNames, false, "R*", null, subtree, MatchMode.BEGINNING,
                 null, includeUnpublished, null, null, null, null);
         Assert.assertEquals("There should be 1 Taxa", 1, results.size());
 
         //find all three misapplied names
-        results = taxonDao.getTaxaByName(noTaxa, noSynonyms, doMisapplied, noCommonNames, false, "*", null, MatchMode.BEGINNING,
+        results = taxonDao.getTaxaByName(noTaxa, noSynonyms, doMisapplied, noCommonNames, false, "*", null, subtree, MatchMode.BEGINNING,
                 null, includeUnpublished, null, null, null, null);
         Assert.assertEquals("There should be 3 Taxa", 3, results.size());
 
@@ -296,35 +293,36 @@ public class TaxonDaoHibernateImplTest extends CdmTransactionalIntegrationTest {
     @Test
     @DataSet (loadStrategy=CleanSweepInsertLoadStrategy.class, value="TaxonDaoHibernateImplTest.testGetTaxaByNameAndArea.xml")
     public void testGetTaxaByNameVariants(){
+        TaxonNode subtree = null;
         @SuppressWarnings("rawtypes")
-        List<TaxonBase> results = taxonDao.getTaxaByName(noTaxa, noSynonyms, noMisapplied, doCommonNames, false, "c*", null, MatchMode.BEGINNING,
+        List<TaxonBase> results = taxonDao.getTaxaByName(noTaxa, noSynonyms, noMisapplied, doCommonNames, false, "c*", null, subtree, MatchMode.BEGINNING,
                 null, includeUnpublished, null, null, null, null);
         Assert.assertEquals("There should be 2 Taxa",2, results.size());
 
-        results = taxonDao.getTaxaByName(noTaxa, noSynonyms, doMisapplied, doCommonNames, false, "R*", null, MatchMode.BEGINNING,
+        results = taxonDao.getTaxaByName(noTaxa, noSynonyms, doMisapplied, doCommonNames, false, "R*", null, subtree, MatchMode.BEGINNING,
                 null, includeUnpublished, null, null, null, null);
         Assert.assertEquals("There should be 1 Taxa", 1, results.size());
 
-        results = taxonDao.getTaxaByName(noTaxa, doSynonyms, doMisapplied, doCommonNames, false, "R*", null, MatchMode.BEGINNING,
+        results = taxonDao.getTaxaByName(noTaxa, doSynonyms, doMisapplied, doCommonNames, false, "R*", null, subtree, MatchMode.BEGINNING,
                 null, includeUnpublished, null, null, null, null);
         Assert.assertEquals("There should be 1 Taxa", 1, results.size());
 
-        results = taxonDao.getTaxaByName(noTaxa, doSynonyms, noMisapplied, doCommonNames, false, "c*", null, MatchMode.BEGINNING,
+        results = taxonDao.getTaxaByName(noTaxa, doSynonyms, noMisapplied, doCommonNames, false, "c*", null, subtree, MatchMode.BEGINNING,
                 null, includeUnpublished, null, null, null, null);
         Assert.assertEquals("There should be 2 Taxa", 2, results.size());
 
-        results = taxonDao.getTaxaByName(doTaxa, noSynonyms, noMisapplied, doCommonNames, false, "c*", null, MatchMode.BEGINNING,
+        results = taxonDao.getTaxaByName(doTaxa, noSynonyms, noMisapplied, doCommonNames, false, "c*", null, subtree, MatchMode.BEGINNING,
                 null, includeUnpublished, null, null, null, null);
         Assert.assertEquals("There should be 2 Taxa", 2, results.size());
 
         Classification classification = classificationDao.load(classificationUuid);
-        results = taxonDao.getTaxaByName(noTaxa, noSynonyms, noMisapplied, doCommonNames, false, "c*", classification, MatchMode.BEGINNING,
+        results = taxonDao.getTaxaByName(noTaxa, noSynonyms, noMisapplied, doCommonNames, false, "c*", classification, subtree, MatchMode.BEGINNING,
                 null, includeUnpublished, null, null, null, null);
         Assert.assertEquals("There should be 1 Taxa", 1, results.size());
 
         Set<NamedArea> namedAreas = new HashSet<>();
         namedAreas.add((NamedArea)definedTermDao.load(southernAmericaUuid));
-        results = taxonDao.getTaxaByName(noTaxa, noSynonyms, noMisapplied, doCommonNames, false, "c*", null, MatchMode.BEGINNING,
+        results = taxonDao.getTaxaByName(noTaxa, noSynonyms, noMisapplied, doCommonNames, false, "c*", null, subtree, MatchMode.BEGINNING,
                 namedAreas, includeUnpublished, null, null, null, null);
         Assert.assertEquals("There should be 1 Taxa", 1, results.size());
     }
@@ -335,37 +333,36 @@ public class TaxonDaoHibernateImplTest extends CdmTransactionalIntegrationTest {
     @Test
     @DataSet
     public void testGetTaxaByNameForEditor() {
+        TaxonNode subtree = null;
         Reference sec = referenceDao.findById(1);
         assert sec != null : "sec must exist";
 
         @SuppressWarnings("rawtypes")
         List<UuidAndTitleCache<? extends IdentifiableEntity>> results = taxonDao.getTaxaByNameForEditor(
-                doTaxa, doSynonyms, noMisapplied, noCommonNames, false, includeUnpublished, "Acher", null, MatchMode.BEGINNING, null, null);
+                doTaxa, doSynonyms, noMisapplied, noCommonNames, false, includeUnpublished, "Acher", null, subtree, MatchMode.BEGINNING, null, null);
         assertNotNull("getTaxaByName should return a List", results);
         assertFalse("The list should not be empty", results.isEmpty());
         assertEquals(4, results.size());
 
-
-        results = taxonDao.getTaxaByNameForEditor(doTaxa, doSynonyms, noMisapplied, noCommonNames, false,includeUnpublished, "A",null, MatchMode.BEGINNING, null, null);
+        results = taxonDao.getTaxaByNameForEditor(doTaxa, doSynonyms, noMisapplied, noCommonNames, false,includeUnpublished, "A", null, subtree, MatchMode.BEGINNING, null, null);
         assertNotNull("getTaxaByName should return a List", results);
         assertEquals(7, results.size());
 
-
-        results = taxonDao.getTaxaByNameForEditor(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, includeUnpublished, "A", null, MatchMode.BEGINNING, null, null);
+        results = taxonDao.getTaxaByNameForEditor(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, includeUnpublished, "A", null, subtree, MatchMode.BEGINNING, null, null);
         assertNotNull("getTaxaByName should return a List", results);
         assertEquals(5, results.size());
         assertEquals(results.get(0).getType(), Taxon.class);
 
-        results = taxonDao.getTaxaByNameForEditor(noTaxa, doSynonyms, noMisapplied, noCommonNames, false, includeUnpublished, "A", null, MatchMode.BEGINNING, null, null);
+        results = taxonDao.getTaxaByNameForEditor(noTaxa, doSynonyms, noMisapplied, noCommonNames, false, includeUnpublished, "A", null, subtree, MatchMode.BEGINNING, null, null);
         assertNotNull("getTaxaByName should return a List", results);
         assertEquals(2, results.size());
         assertEquals(results.get(0).getType(), Synonym.class);
 
-        results = taxonDao.getTaxaByNameForEditor(doTaxa, doSynonyms, noMisapplied, noCommonNames, false,includeUnpublished, "Aus", null, MatchMode.EXACT, null, null);
+        results = taxonDao.getTaxaByNameForEditor(doTaxa, doSynonyms, noMisapplied, noCommonNames, false,includeUnpublished, "Aus", null, subtree, MatchMode.EXACT, null, null);
         assertNotNull("getTaxaByName should return a List", results);
         assertEquals("Results list should contain one entity",1,results.size());
 
-        results = taxonDao.getTaxaByNameForEditor(doTaxa, doSynonyms, doMisapplied, noCommonNames, false, includeUnpublished, "A", null, MatchMode.BEGINNING, null, null);
+        results = taxonDao.getTaxaByNameForEditor(doTaxa, doSynonyms, doMisapplied, noCommonNames, false, includeUnpublished, "A", null, subtree, MatchMode.BEGINNING, null, null);
         assertNotNull("getTaxaByName should return a List", results);
         assertEquals("Results list should contain one entity", 8, results.size());
 
@@ -381,13 +378,13 @@ public class TaxonDaoHibernateImplTest extends CdmTransactionalIntegrationTest {
     @Test
     @DataSet(loadStrategy=CleanSweepInsertLoadStrategy.class, value="TaxonDaoHibernateImplTest.testGetTaxaByNameAndArea.xml")
     public void testGetTaxaByNameAndArea() {
-
+        TaxonNode subtree = null;
         Set<NamedArea> namedAreas = new HashSet<>();
         namedAreas.add((NamedArea)definedTermDao.load(northernAmericaUuid));
         //namedAreas.add((NamedArea)definedTermDao.load(southernAmericaUuid));
         //namedAreas.add((NamedArea)definedTermDao.load(antarcticaUuid));
 
-        Classification taxonmicTree = classificationDao.findByUuid(classificationUuid);
+        Classification classification = classificationDao.findByUuid(classificationUuid);
 
         // prepare some synonym relation ships for some tests
         Synonym synAtroposAgassiz = (Synonym)taxonDao.findByUuid(atroposAgassiz);
@@ -406,25 +403,25 @@ public class TaxonDaoHibernateImplTest extends CdmTransactionalIntegrationTest {
         //long numberOfTaxa = taxonDao.countTaxaByName(Taxon.class, "Rethera", null, MatchMode.BEGINNING, namedAreas);
 
         @SuppressWarnings("rawtypes")
-        List<TaxonBase> results = taxonDao.getTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, "Rethera", null, MatchMode.BEGINNING, namedAreas,
+        List<TaxonBase> results = taxonDao.getTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, "Rethera", null, subtree, MatchMode.BEGINNING, namedAreas,
                 includeUnpublished, null, null, null, null);
         assertNotNull("getTaxaByName should return a List", results);
         assertTrue("expected to find two taxa but found "+results.size(), results.size() == 2);
 
-        results = taxonDao.getTaxaByName(noTaxa, noSynonyms, noMisapplied, doCommonNames, false, "com*", null, MatchMode.BEGINNING, namedAreas,
+        results = taxonDao.getTaxaByName(noTaxa, noSynonyms, noMisapplied, doCommonNames, false, "com*", null, subtree, MatchMode.BEGINNING, namedAreas,
                 includeUnpublished, null, null, null, null);
             assertNotNull("getTaxaByName should return a List", results);
             assertTrue("expected to find one taxon but found "+results.size(), results.size() == 1);
 
         // 2. searching for a taxon (Rethera) contained in a specific classification
-        results = taxonDao.getTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, "Rethera", taxonmicTree, MatchMode.BEGINNING, namedAreas,
+        results = taxonDao.getTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, "Rethera", classification, subtree, MatchMode.BEGINNING, namedAreas,
                 includeUnpublished, null, null, null, null);
         assertNotNull("getTaxaByName should return a List", results);
         assertTrue("expected to find one taxon but found "+results.size(), results.size() == 1);
 
 
         // 3. searching for Synonyms
-        results = taxonDao.getTaxaByName(noTaxa, doSynonyms, noMisapplied, noCommonNames, false, "Atropo", null, MatchMode.ANYWHERE, null,
+        results = taxonDao.getTaxaByName(noTaxa, doSynonyms, noMisapplied, noCommonNames, false, "Atropo", null, subtree, MatchMode.ANYWHERE, null,
                 includeUnpublished, null, null, null, null);
         assertNotNull("getTaxaByName should return a List", results);
         /*System.err.println(results.get(0).getTitleCache() + " - " +results.get(1).getTitleCache() + " - " +results.get(2).getTitleCache() );
@@ -435,14 +432,14 @@ public class TaxonDaoHibernateImplTest extends CdmTransactionalIntegrationTest {
         assertTrue("expected to find three taxa but found "+results.size(), results.size() == 3);
 
         // 4. searching for Synonyms
-        results = taxonDao.getTaxaByName(noTaxa, doSynonyms, noMisapplied, noCommonNames, false,"Atropo", null, MatchMode.BEGINNING, null,
+        results = taxonDao.getTaxaByName(noTaxa, doSynonyms, noMisapplied, noCommonNames, false,"Atropo", null, subtree, MatchMode.BEGINNING, null,
                 includeUnpublished, null, null, null, null);
         assertNotNull("getTaxaByName should return a List", results);
         assertTrue("expected to find three taxa but found "+results.size(), results.size() == 3);
 
 
         // 5. searching for a Synonyms and Taxa
-        results = taxonDao.getTaxaByName(doTaxa, doSynonyms, noMisapplied, noCommonNames, false,"A", null, MatchMode.BEGINNING, namedAreas,
+        results = taxonDao.getTaxaByName(doTaxa, doSynonyms, noMisapplied, noCommonNames, false,"A", null, subtree, MatchMode.BEGINNING, namedAreas,
                 includeUnpublished, null, null, null, null);
         //only five taxa have a distribution
         assertNotNull("getTaxaByName should return a List", results);
@@ -451,12 +448,13 @@ public class TaxonDaoHibernateImplTest extends CdmTransactionalIntegrationTest {
 
 
     /**
-     * Test method for {@link eu.etaxonomy.cdm.persistence.dao.hibernate.taxon.TaxonDaoHibernateImpl#findByNameTitleCache(Class<? extends TaxonBase>clazz, String queryString, Classification classification, MatchMode matchMode, Set<NamedArea> namedAreas, Integer pageNumber, Integer pageSize, List<String> propertyPaths)}
+     * Test method for {@link eu.etaxonomy.cdm.persistence.dao.hibernate.taxon.TaxonDaoHibernateImpl#findByNameTitleCache(Class<? extends TaxonBase>clazz, String queryString, Classification classification, TaxonNode subtree, MatchMode matchMode, Set<NamedArea> namedAreas, Integer pageNumber, Integer pageSize, List<String> propertyPaths)}
      * restricting the search by a set of Areas.
      */
     @Test
     @DataSet(loadStrategy=CleanSweepInsertLoadStrategy.class, value="TaxonDaoHibernateImplTest.testGetTaxaByNameAndArea.xml")
     public void testFindByNameTitleCache() {
+        TaxonNode subtree = null;
 
         Set<NamedArea> namedAreas = new HashSet<>();
         namedAreas.add((NamedArea)definedTermDao.load(northernAmericaUuid));
@@ -469,41 +467,40 @@ public class TaxonDaoHibernateImplTest extends CdmTransactionalIntegrationTest {
         Synonym synAtroposAgassiz = (Synonym)taxonDao.findByUuid(atroposAgassiz);
         Taxon taxonRethera = (Taxon)taxonDao.findByUuid(rethera);
         taxonRethera.addSynonym(synAtroposAgassiz, SynonymType.SYNONYM_OF());
-        logger.warn("addSynonym(..)");
+        //logger.warn("addSynonym(..)");
         this.taxonDao.clear();
         Synonym synAtroposLeach = (Synonym)taxonDao.findByUuid(atroposLeach);
         Taxon taxonRetheraSecCdmtest = (Taxon)taxonDao.findByUuid(retheraSecCdmtest);
         taxonRetheraSecCdmtest.addSynonym(synAtroposLeach, SynonymType.SYNONYM_OF());
         this.taxonDao.clear();
+
         // 1. searching for a taxon (Rethera)
         //long numberOfTaxa = taxonDao.countTaxaByName(Taxon.class, "Rethera", null, MatchMode.BEGINNING, namedAreas);
 
         @SuppressWarnings("rawtypes")
-        List<TaxonBase> results = taxonDao.findByNameTitleCache(true, false, includeUnpublished, "Rethera Rothschild & Jordan, 1903", null, MatchMode.EXACT, namedAreas,
+        List<TaxonBase> results = taxonDao.findByNameTitleCache(doTaxa, noSynonyms, includeUnpublished, "Rethera Rothschild & Jordan, 1903", null, subtree, MatchMode.EXACT, namedAreas,
                 null, null, null, null);
         assertNotNull("getTaxaByName should return a List", results);
         assertTrue("expected to find two taxa but found "+results.size(), results.size() == 2);
 
         // 2. searching for a taxon (Rethera) contained in a specific classification
-        results = taxonDao.findByNameTitleCache(true, false, includeUnpublished, "Rethera Rothschild & Jordan, 1903", classification, MatchMode.EXACT, namedAreas,
+        results = taxonDao.findByNameTitleCache(doTaxa, noSynonyms, includeUnpublished, "Rethera Rothschild & Jordan, 1903", classification, subtree, MatchMode.EXACT, namedAreas,
                 null, null, null, null);
         assertNotNull("getTaxaByName should return a List", results);
         assertTrue("expected to find one taxon but found "+results.size(), results.size() == 1);
 
-
         // 3. searching for Synonyms
-        results = taxonDao.findByNameTitleCache(false, true, includeUnpublished, "*Atropo", null, MatchMode.ANYWHERE, null,
+        results = taxonDao.findByNameTitleCache(noTaxa, doSynonyms, includeUnpublished, "*Atropo", null, subtree, MatchMode.ANYWHERE, null,
                 null, null, null, null);
         assertNotNull("getTaxaByName should return a List", results);
         assertTrue("expected to find two taxa but found "+results.size(), results.size() == 2);
 
         // 4. searching for Synonyms
-        results = taxonDao.findByNameTitleCache(false, true, includeUnpublished, "Atropo", null, MatchMode.BEGINNING, null,
+        results = taxonDao.findByNameTitleCache(noTaxa, doSynonyms, includeUnpublished, "Atropo", null, subtree, MatchMode.BEGINNING, null,
                 null, null, null, null);
         assertNotNull("getTaxaByName should return a List", results);
         assertTrue("expected to find two taxa but found "+results.size(), results.size() == 2);
 
-
         // 5. searching for a Synonyms and Taxa
         //   attache a synonym first
         Synonym syn = (Synonym)taxonDao.findByUuid(this.atroposLeach);
@@ -511,7 +508,7 @@ public class TaxonDaoHibernateImplTest extends CdmTransactionalIntegrationTest {
         tax.addSynonym(syn, SynonymType.HETEROTYPIC_SYNONYM_OF());
 
         taxonDao.save(tax);
-        results = taxonDao.findByNameTitleCache(true, true, includeUnpublished, "A", null, MatchMode.BEGINNING, namedAreas,
+        results = taxonDao.findByNameTitleCache(doTaxa, doSynonyms, includeUnpublished, "A", null, subtree, MatchMode.BEGINNING, namedAreas,
                 null, null, null, null);
         assertNotNull("getTaxaByName should return a List", results);
         assertTrue("expected to find 8 taxa but found "+results.size(), results.size() == 8);
@@ -520,10 +517,11 @@ public class TaxonDaoHibernateImplTest extends CdmTransactionalIntegrationTest {
     @Test
     @DataSet(loadStrategy=CleanSweepInsertLoadStrategy.class, value="TaxonDaoHibernateImplTest.testGetTaxaByNameAndArea.xml")
     public void testTaxonNameInTwoClassifications(){
+        TaxonNode subtree = null;
         List<String> propertyPaths = new ArrayList<>();
         propertyPaths.add("taxonNodes");
         @SuppressWarnings("rawtypes")
-        List<TaxonBase> taxa = taxonDao.getTaxaByName(doTaxa, doSynonyms, noMisapplied, noCommonNames, false,"P", null, MatchMode.BEGINNING,
+        List<TaxonBase> taxa = taxonDao.getTaxaByName(doTaxa, doSynonyms, noMisapplied, noCommonNames, false,"P", null, subtree, MatchMode.BEGINNING,
                 null, includeUnpublished, null, null, null, null);
         Taxon taxon = (Taxon)taxa.get(0);
         Set<TaxonNode> nodes = taxon.getTaxonNodes();
@@ -535,9 +533,9 @@ public class TaxonDaoHibernateImplTest extends CdmTransactionalIntegrationTest {
     @Test
     @DataSet(loadStrategy=CleanSweepInsertLoadStrategy.class, value="TaxonDaoHibernateImplTest.testGetTaxaByNameAndArea.xml")
     public void testGetTaxaByNameProParteSynonym(){
-
+        TaxonNode subtree = null;
         @SuppressWarnings("rawtypes")
-        List<TaxonBase> taxa = taxonDao.getTaxaByName(noTaxa, doSynonyms, noMisapplied, noCommonNames, false, "A", null,
+        List<TaxonBase> taxa = taxonDao.getTaxaByName(noTaxa, doSynonyms, noMisapplied, noCommonNames, false, "A", null,subtree,
                 MatchMode.BEGINNING, null, includeUnpublished, null, null, null, null);
         Assert.assertEquals("2 synonyms and 1 pro parte synonym should be returned.", 3, taxa.size());
         assertTrue("Pro parte should exist", existsInCollection(taxa, acherontiaLachesis));
@@ -545,13 +543,13 @@ public class TaxonDaoHibernateImplTest extends CdmTransactionalIntegrationTest {
         assertTrue("2. normal synonym should exist", existsInCollection(taxa, atroposOken));
         //TODO shouldn't we also find orphaned synonyms (without accepted taxon) like Atropos Leach?
 
-        taxa = taxonDao.getTaxaByName(noTaxa, doSynonyms, noMisapplied, noCommonNames, false, "A", null,
+        taxa = taxonDao.getTaxaByName(noTaxa, doSynonyms, noMisapplied, noCommonNames, false, "A", null,subtree,
                 MatchMode.BEGINNING, null, NO_UNPUBLISHED, null, null, null, null);
         Assert.assertEquals("2 synonyms and no pro parte synonym should be returned.", 2, taxa.size());
         assertTrue("Normal synonym should exist", existsInCollection(taxa, atroposAgassiz));
         assertTrue("2. normal synonym should exist", existsInCollection(taxa, atroposOken));
 
-        taxa = taxonDao.getTaxaByName(noTaxa, noSynonyms, doMisapplied, noCommonNames, false, "A", null,
+        taxa = taxonDao.getTaxaByName(noTaxa, noSynonyms, doMisapplied, noCommonNames, false, "A", null,subtree,
                 MatchMode.BEGINNING, null, includeUnpublished, null, null, null, null);
         Assert.assertEquals("1 misapplied name, no pro parte synonym should be returned.", 1, taxa.size());
         assertTrue("Pro parte should exist", existsInCollection(taxa, aus));
@@ -592,20 +590,49 @@ public class TaxonDaoHibernateImplTest extends CdmTransactionalIntegrationTest {
     @Test
     @DataSet
     public void testCountTaxaByName() {
-        long numberOfTaxa = taxonDao.countTaxaByName(true, false, false, false,false, "A", null, MatchMode.BEGINNING, null, includeUnpublished);
+        TaxonNode subtree = null;
+        Classification classification= null;
+        long numberOfTaxa = taxonDao.countTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames,false, "A", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
         assertEquals(5, numberOfTaxa);
-        numberOfTaxa = taxonDao.countTaxaByName(true, false, false, false, false,"Smerinthus kindermannii", null, MatchMode.EXACT, null, includeUnpublished);
+        numberOfTaxa = taxonDao.countTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, "S", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
+        assertEquals("Sphingidae, Smerinthus, Smerinthus kindermannii and Sphingonaepiopsis expected", 4, numberOfTaxa);
+        numberOfTaxa = taxonDao.countTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, "Smerinthus kindermannii", classification, subtree, MatchMode.EXACT, null, includeUnpublished);
         assertEquals(1, numberOfTaxa);
-        numberOfTaxa = taxonDao.countTaxaByName(false, true, false, false, false,"A", null, MatchMode.BEGINNING, null, includeUnpublished);
+        numberOfTaxa = taxonDao.countTaxaByName(noTaxa, doSynonyms, noMisapplied, noCommonNames, false, "A", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
         assertEquals(2, numberOfTaxa);
-        numberOfTaxa = taxonDao.countTaxaByName(true, true, false, false, false,"A", null, MatchMode.BEGINNING, null, includeUnpublished);
+        numberOfTaxa = taxonDao.countTaxaByName(doTaxa, doSynonyms, noMisapplied, noCommonNames, false, "A", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
         assertEquals(7, numberOfTaxa);
-        numberOfTaxa = taxonDao.countTaxaByName(true, true, false, false,false, "Aasfwerfwf fffe", null, MatchMode.BEGINNING, null, includeUnpublished);
+        numberOfTaxa = taxonDao.countTaxaByName(doTaxa, doSynonyms, noMisapplied, noCommonNames, false, "Aasfwerfwf fffe", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
+        assertEquals(0, numberOfTaxa);
+
+        subtree = taxonNodeDao.findByUuid(UUID_ACHERONTIA_NODE);
+        numberOfTaxa = taxonDao.countTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, "A", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
+        assertEquals("Acherontia and 2 A. species expected", 3, numberOfTaxa);
+        numberOfTaxa = taxonDao.countTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, "S", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
+        assertEquals("", 0, numberOfTaxa);
+        numberOfTaxa = taxonDao.countTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, "Smerinthus kindermannii", classification, subtree, MatchMode.EXACT, null, includeUnpublished);
+        assertEquals("Smerinthus is not in subtree", 0, numberOfTaxa);
+        numberOfTaxa = taxonDao.countTaxaByName(noTaxa, doSynonyms, noMisapplied, noCommonNames, false, "A", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
+        assertEquals("Atropos Agassiz and Atropos Oken expected as Synonyms", 2, numberOfTaxa);
+        numberOfTaxa = taxonDao.countTaxaByName(doTaxa, doSynonyms, noMisapplied, noCommonNames, false, "A", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
+        assertEquals("The above accepted and synonyms expected", 5, numberOfTaxa);
+        numberOfTaxa = taxonDao.countTaxaByName(doTaxa, doSynonyms, noMisapplied, noCommonNames, false, "Aasfwerfwf fffe", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
+        assertEquals(0, numberOfTaxa);
+
+        classification = classificationDao.findByUuid(UUID_CLASSIFICATION2);
+        subtree = null;
+        numberOfTaxa = taxonDao.countTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, "A", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
+        assertEquals("Acherontia and 2 A. species expected", 3, numberOfTaxa);
+        numberOfTaxa = taxonDao.countTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, "S", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
+        assertEquals("Sphingidae expected", 1, numberOfTaxa);
+        numberOfTaxa = taxonDao.countTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, "Smerinthus kindermannii", classification, subtree, MatchMode.EXACT, null, includeUnpublished);
+        assertEquals("Smerinthus is not in subtree", 0, numberOfTaxa);
+        numberOfTaxa = taxonDao.countTaxaByName(noTaxa, doSynonyms, noMisapplied, noCommonNames, false, "A", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
+        assertEquals("Atropos Agassiz and Atropos Oken expected as Synonyms", 2, numberOfTaxa);
+        numberOfTaxa = taxonDao.countTaxaByName(doTaxa, doSynonyms, noMisapplied, noCommonNames, false, "A", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
+        assertEquals("The above accepted and synonyms expected", 5, numberOfTaxa);
+        numberOfTaxa = taxonDao.countTaxaByName(doTaxa, doSynonyms, noMisapplied, noCommonNames, false, "Aasfwerfwf fffe", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
         assertEquals(0, numberOfTaxa);
-//     FIXME implement test for search in specific classification
-//             Reference reference = referenceDao.findByUuid(UUID.fromString("596b1325-be50-4b0a-9aa2-3ecd610215f2"));
-//             numberOfTaxa = taxonDao.countTaxaByName("A*", MatchMode.BEGINNING, SelectMode.ALL, null, null);
-//             assertEquals(numberOfTaxa, 2);
     }
 
     @Test
index 1e3d7e578d28b8cc4e701457214468925604910f..9e69e241fa034420d09c9109fa208646ef1bf0de 100644 (file)
@@ -165,17 +165,18 @@ public class TaxonNodeDaoHibernateImplTest extends CdmTransactionalIntegrationTe
     public void testListChildren(){
         boolean includeUnpublished;
         Taxon t_acherontia = (Taxon) taxonDao.load(ACHERONTIA_UUID);
-
+        TaxonNode subtree = null;
         includeUnpublished = true;
+
         Classification classification =  classificationDao.load(ClassificationUuid);
         List<TaxonNode> children = classificationDao.listChildrenOf(
-                t_acherontia, classification, includeUnpublished, null, null, null);
+                t_acherontia, classification, subtree, includeUnpublished, null, null, null);
         assertNotNull(children);
         assertEquals(2, children.size());
 
         includeUnpublished = false;
         children = classificationDao.listChildrenOf(
-                t_acherontia, classification, includeUnpublished, null, null, null);
+                t_acherontia, classification, subtree, includeUnpublished, null, null, null);
         assertNotNull(children);
         assertEquals(1, children.size()); //1 is unpublished
 
index e9fd69942cefdad9c59a551a34bad9ff7e90c220..8d3cff9e66ac34af3a80ee5f140a4cca99c34403 100644 (file)
@@ -23,7 +23,6 @@ import java.util.Map;
 import java.util.Set;
 import java.util.UUID;
 
-import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
 import org.hibernate.FlushMode;
 import org.hibernate.Hibernate;
@@ -260,7 +259,7 @@ public class AdvancedBeanInitializerTest extends CdmTransactionalIntegrationTest
 
         assureSessionClear();
 
-        Logger.getLogger(AdvancedBeanInitializer.class).setLevel(Level.TRACE);
+        //Logger.getLogger(AdvancedBeanInitializer.class).setLevel(Level.TRACE);
 
         Taxon taxon = (Taxon)taxonDao.load(taxonUuid, Arrays.asList("$"));
         assertTrue(Hibernate.isInitialized(taxon.getName()));
@@ -276,7 +275,7 @@ public class AdvancedBeanInitializerTest extends CdmTransactionalIntegrationTest
 
         assureSessionClear();
 
-        Logger.getLogger(AdvancedBeanInitializer.class).setLevel(Level.TRACE);
+        //Logger.getLogger(AdvancedBeanInitializer.class).setLevel(Level.TRACE);
 
         deacivatedAutoIntitializers = clearAutoinitializers();
         // load bean with autoinitializers deactivated
index 92eef8a674e658b1b8663e340a0b4f15102a29bc..e31deefd2f63460aae80e2a80b9f8a7c137a5b93 100644 (file)
   <REFERENCE  ID="1" CREATED="2008-12-10 09:56:07.0" UUID="596b1325-be50-4b0a-9aa2-3ecd610215f2" UPDATED="2008-12-10 09:56:07.253" TITLECACHE="Lorem ipsum" PROTECTEDTITLECACHE="true" PROTECTEDABBREVTITLECACHE="false" ABBREVTITLECACHE="Sp. Pl." ABBREVTITLE="Sp. Pl." NOMENCLATURALLYRELEVANT="false" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1"/>\r
   <REFERENCE  ID="2" CREATED="2008-12-10 09:56:07.0" UUID="ad4322b7-4b05-48af-be70-f113e46c545e" UPDATED="2008-12-10 09:56:07.253" TITLECACHE="cate-sphingidae.org" PROTECTEDTITLECACHE="true" PROTECTEDABBREVTITLECACHE="false" ABBREVTITLECACHE="Sp. Pl." ABBREVTITLE="Sp. Pl." NOMENCLATURALLYRELEVANT="false" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1"/>\r
   <REFERENCE  ID="3" CREATED="2008-12-10 09:56:07.0" UUID="3eea6f96-0682-4025-8cdd-aaaf7c915ae2" UPDATED="2008-12-10 09:56:07.253" TITLECACHE="cate-araceae.org" PROTECTEDTITLECACHE="true" PROTECTEDABBREVTITLECACHE="false" ABBREVTITLECACHE="Sp. Pl." ABBREVTITLE="Sp. Pl." NOMENCLATURALLYRELEVANT="false" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1"/>\r
+  <CLASSIFICATION ID="5000" CREATED="2015-06-18 09:36:39.0" UUID="2a5ceebb-4830-4524-b330-78461bf8cb6b" PROTECTEDTITLECACHE="true" TITLECACHE="Classification 1" ROOTNODE_ID="5000"/>\r
+  <CLASSIFICATION ID="5001" CREATED="2015-06-18 09:36:39.0" UUID="a71467a6-74dc-4148-9530-484628a5ab0e" PROTECTEDTITLECACHE="true" TITLECACHE="Classification 2" ROOTNODE_ID="5001"/>\r
+  <TAXONNODE ID="5000" CREATED="2015-06-18 09:36:39.0" UUID="75202d4e-b2aa-4343-8b78-340a52d15c40" SORTINDEX="-1" TREEINDEX="#t5000#5000#"      COUNTCHILDREN="3" TAXON_ID="[null]" EXCLUDED="FALSE" UNPLACED="FALSE" CLASSIFICATION_ID="5000" PARENT_ID="[null]"/>\r
+  <TAXONNODE ID="5002" CREATED="2015-06-18 09:36:39.0" UUID="6b76c838-bd8f-43f9-8fa9-077cd222a9b2" SORTINDEX="0"  TREEINDEX="#t5000#5000#5002#" COUNTCHILDREN="0" TAXON_ID="1"   EXCLUDED="FALSE" UNPLACED="FALSE" CLASSIFICATION_ID="5000" PARENT_ID="5000"/>\r
+  <TAXONNODE ID="5004" CREATED="2015-06-18 09:36:39.0" UUID="ebbae10f-d179-4a08-9939-9fed0a7f1433" SORTINDEX="2"  TREEINDEX="#t5000#5000#5004#" COUNTCHILDREN="0" TAXON_ID="5002"   EXCLUDED="FALSE" UNPLACED="FALSE" CLASSIFICATION_ID="5000" PARENT_ID="5000"/>\r
+  <TAXONNODE ID="5001" CREATED="2015-06-18 09:36:39.0" UUID="7b95a2a6-2c6e-4b8e-a91a-7a1d995490f9" SORTINDEX="-1" TREEINDEX="#t5001#5001#"                COUNTCHILDREN="1" TAXON_ID="[null]" EXCLUDED="FALSE" UNPLACED="FALSE" CLASSIFICATION_ID="5001" PARENT_ID="[null]"/>\r
+  <TAXONNODE ID="5007" CREATED="2015-06-18 09:36:40.0" UUID="bcdf945f-1f02-423e-883d-fe89e0af93e4" SORTINDEX="0"  TREEINDEX="#t5001#5001#5007#"           COUNTCHILDREN="1" TAXON_ID="3"      EXCLUDED="FALSE" UNPLACED="FALSE" CLASSIFICATION_ID="5001" PARENT_ID="5001"/>\r
+  <TAXONNODE ID="5006" CREATED="2015-06-18 09:36:40.0" UUID="56b10cf0-9522-407e-9f90-0c2dba263c94" SORTINDEX="0"  TREEINDEX="#t5001#5001#5007#5006#"      COUNTCHILDREN="2" TAXON_ID="15"     EXCLUDED="FALSE" UNPLACED="FALSE" CLASSIFICATION_ID="5001" PARENT_ID="5007"/>\r
+  <TAXONNODE ID="5005" CREATED="2015-06-18 09:36:40.0" UUID="ba290371-a72b-43bf-a913-8a03c79755c7" SORTINDEX="0"  TREEINDEX="#t5001#5001#5007#5006#5005#" COUNTCHILDREN="0" TAXON_ID="35"     EXCLUDED="FALSE" UNPLACED="FALSE" CLASSIFICATION_ID="5001" PARENT_ID="5006"/>\r
+  <TAXONNODE ID="5008" CREATED="2015-06-18 09:36:40.0" UUID="80150ae0-e1e6-42a4-b224-21e099756c3d" SORTINDEX="1"  TREEINDEX="#t5001#5001#5007#5006#5008#" COUNTCHILDREN="0" TAXON_ID="36"     EXCLUDED="FALSE" UNPLACED="FALSE" CLASSIFICATION_ID="5001" PARENT_ID="5006"/>\r
+  \r
+  \r
   <TAXONBASE DTYPE="Taxon" ID="1" SEC_ID="1" CREATED="2008-01-10 09:56:07.0" UUID="496b1325-be50-4b0a-9aa2-3ecd610215f2" UPDATED="2008-12-10 09:56:07.253" TITLECACHE="Aus sec. ???" PROTECTEDTITLECACHE="true" TAXONSTATUSUNKNOWN="false"  PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" NAME_ID="1" />\r
   <TAXONBASE DTYPE="Taxon" ID="3" SEC_ID="2" CREATED="2008-01-12 09:56:07.0" UUID="54e767ee-894e-4540-a758-f906ecb4e2d9" UPDATED="2008-12-10 09:56:07.253" TITLECACHE="Sphingidae Linnaeus, 1758 sec. cate-sphingidae.org" PROTECTEDTITLECACHE="true" TAXONSTATUSUNKNOWN="false"  PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" NAME_ID="3"/>\r
   <TAXONBASE DTYPE="Taxon" ID="5" SEC_ID="2" CREATED="2008-02-04 09:56:07.0" UUID="17233b5e-74e7-42fc-bc37-522684657ed4" UPDATED="2008-12-10 09:56:07.253" TITLECACHE="Smerinthus Latreille, 1802 sec. cate-sphingidae.org" PROTECTEDTITLECACHE="true" PUBLISH="true" TAXONSTATUSUNKNOWN="false"  DOUBTFUL="false" USENAMECACHE="false" NAME_ID="5"/>\r
index 5d713ec292a4de4a52e538b88ab541cdaa325b82..a7c962ec77af4cb22fb0921e243017fcaba52de8 100644 (file)
@@ -3,7 +3,7 @@
   <parent>
     <artifactId>cdmlib-parent</artifactId>
     <groupId>eu.etaxonomy</groupId>
-    <version>5.2.0</version>
+    <version>5.3.0</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
   <modelVersion>4.0.0</modelVersion>
index 791c2b44c230d40e5aced6e135bd7d33253d853a..1a06e6ee56fc0b9668316289e5926309f621ea85 100644 (file)
@@ -163,7 +163,7 @@ public class LocalXMLEntityFactory extends XmlEntityFactoryBase {
         Object resultObject = null;
         try {
             if (EntityType.CLASSIFICATION.equals(entityType)) {
-                resultObject = classificationController.getChildNodes(uuid,
+                resultObject = classificationController.getChildNodes(uuid, null,
                         null, null);
             } else if (EntityType.TAXON_NODE.equals(entityType)) {
                 resultObject = taxonNodePrintAppController
index 27bd7ba92882b7149ca3fdde5cc152b207ef5f1f..5938539900011aed8e44ef392f246183a2f465ea 100755 (executable)
@@ -4,7 +4,7 @@
   <parent>
     <artifactId>cdmlib-parent</artifactId>
     <groupId>eu.etaxonomy</groupId>
-    <version>5.2.0</version>
+    <version>5.3.0</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 
index 5d573f7148e0a8ce465dc9402118d6d2a4fd0022..d68781f6e979787a0db4ee950dacfc1f48b64e9e 100755 (executable)
@@ -33,8 +33,7 @@ log4j.logger.eu.etaxonomy.cdm = INFO
 #        avoid vast amount of "getCitation not yet implemented" & "getProblems not yet implemented" messages
 log4j.logger.eu.etaxonomy.cdm.model.name.TaxonName = ERROR
 log4j.logger.eu.etaxonomy.cdm.database.UpdatableRoutingDataSource = INFO
-log4j.logger.eu.etaxonomy.cdm.persistence.dao.initializer.AbstractBeanInitializer = ERROR
-log4j.logger.eu.etaxonomy.cdm.persistence.dao.initializer.AdvancedBeanInitializer=ERROR
+log4j.logger.eu.etaxonomy.cdm.persistence.dao.initializer = ERROR
 log4j.logger.eu.etaxonomy.cdm.remote.controller = INFO
 log4j.logger.eu.etaxonomy.cdm.remote.controller.interceptor = WARN
 #log4j.logger.eu.etaxonomy.cdm.remote.json.processor.AbstractCdmBeanProcessor  = DEBUG
index c949371af5cf9ba6747806626d31ad746795aa46..3a2a1e31cf653f75a15c56c847216dcb2224a2b8 100644 (file)
@@ -7,9 +7,6 @@
     http://www.springframework.org/schema/context\r
     http://www.springframework.org/schema/context/spring-context-2.5.xsd">\r
 \r
-    <!-- enable processing of annotations such as @Autowired and @Configuration -->\r
-    <context:annotation-config/>\r
-\r
     <bean class="eu.etaxonomy.cdm.opt.config.DataSourceConfigurer" >\r
     </bean>\r
 \r
index 38bcd29cf8ab34cc293c06cac7fb183bbe62b33d..7a8c4d91211eb105c594739771d080c413bed36b 100644 (file)
@@ -27,7 +27,7 @@ log4j.logger.eu.etaxonomy.cdm = INFO
 #        avoid vast amount of "getCitation not yet implemented" & "getProblems not yet implemented" messages\r
 log4j.logger.eu.etaxonomy.cdm.model.name.TaxonName = ERROR\r
 log4j.logger.eu.etaxonomy.cdm.database.UpdatableRoutingDataSource = INFO\r
-log4j.logger.eu.etaxonomy.cdm.persistence.dao.AbstractBeanInitializer = ERROR\r
+log4j.logger.eu.etaxonomy.cdm.persistence.dao.initializer = ERROR\r
 log4j.logger.eu.etaxonomy.cdm.remote.controller = INFO\r
 log4j.logger.eu.etaxonomy.cdm.remote.controller.interceptor = WARN\r
 #log4j.logger.eu.etaxonomy.cdm.remote.json.processor.AbstractCdmBeanProcessor  = DEBUG\r
index e3f2fa0bf6586ce9e272ce02be4663ae28e35139..f41cdca9f0457dac4d36d4cfb6286bd24142feff 100644 (file)
@@ -4,7 +4,7 @@
   <parent>
     <groupId>eu.etaxonomy</groupId>
     <artifactId>cdmlib-parent</artifactId>
-    <version>5.2.0</version>
+    <version>5.3.0</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 
index 3c9d37a274a6b4b3b12d1a0dfe2182730e7f5e02..b82857ff56be2dcf1b16d860cfcd8837fa5ed411 100644 (file)
@@ -38,13 +38,17 @@ import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
 import org.springframework.web.bind.annotation.RequestParam;
 
+import eu.etaxonomy.cdm.api.service.IClassificationService;
 import eu.etaxonomy.cdm.api.service.IService;
+import eu.etaxonomy.cdm.api.service.ITaxonNodeService;
 import eu.etaxonomy.cdm.api.service.pager.Pager;
 import eu.etaxonomy.cdm.api.service.pager.impl.DefaultPagerImpl;
 import eu.etaxonomy.cdm.hibernate.HibernateProxyHelper;
 import eu.etaxonomy.cdm.model.common.CdmBase;
 import eu.etaxonomy.cdm.model.common.IPublishable;
 import eu.etaxonomy.cdm.model.reference.INomenclaturalReference;
+import eu.etaxonomy.cdm.model.taxon.Classification;
+import eu.etaxonomy.cdm.model.taxon.TaxonNode;
 import eu.etaxonomy.cdm.remote.controller.util.PagerParameters;
 import eu.etaxonomy.cdm.remote.editor.UUIDPropertyEditor;
 
@@ -321,6 +325,7 @@ public abstract class BaseController<T extends CdmBase, SERVICE extends IService
             HttpServletResponse response, List<String> pathProperties)
             throws IOException {
 
+        @SuppressWarnings("unused")
         boolean includeUnpublished = NO_UNPUBLISHED;
         CDM_BASE cdmBaseObject;
 //        if (service instanceof IPublishableService){
@@ -425,6 +430,40 @@ public abstract class BaseController<T extends CdmBase, SERVICE extends IService
         return (S)result;
     }
 
+
+    /**
+     * @param subtreeUuid
+     * @param response
+     * @return
+     * @throws IOException
+     */
+    protected TaxonNode getSubtreeOrError(UUID subtreeUuid, ITaxonNodeService taxonNodeService, HttpServletResponse response) throws IOException {
+        TaxonNode subtree = null;
+        if (subtreeUuid != null){
+            subtree = taxonNodeService.find(subtreeUuid);
+            if(subtree == null) {
+                response.sendError(404 , "TaxonNode not found using " + subtreeUuid );
+                //will not happen
+                return null;
+            }
+        }
+        return subtree;
+    }
+
+    protected Classification getClassificationOrError(UUID classificationUuid,
+            IClassificationService classificationService, HttpServletResponse response) throws IOException {
+        Classification classification = null;
+        if (classificationUuid != null){
+            classification = classificationService.find(classificationUuid);
+            if(classification == null) {
+                response.sendError(404 , "Classification not found: " + classificationUuid );
+                //will not happen
+                return null;
+            }
+        }
+        return classification;
+    }
+
       /* TODO implement
 
       private Validator validator;
index 1fb34b393efdd7342ce032e069afbd9b63244cdf..ac52d252126f53cf745c39af47f0b5a3ca1ca1cf 100644 (file)
@@ -24,9 +24,13 @@ import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;\r
 import org.springframework.web.bind.annotation.RequestParam;\r
 \r
+import eu.etaxonomy.cdm.api.service.IClassificationService;\r
 import eu.etaxonomy.cdm.api.service.IService;\r
+import eu.etaxonomy.cdm.api.service.ITaxonNodeService;\r
 import eu.etaxonomy.cdm.api.service.pager.Pager;\r
 import eu.etaxonomy.cdm.model.common.CdmBase;\r
+import eu.etaxonomy.cdm.model.taxon.Classification;\r
+import eu.etaxonomy.cdm.model.taxon.TaxonNode;\r
 import eu.etaxonomy.cdm.remote.controller.util.PagerParameters;\r
 import eu.etaxonomy.cdm.remote.editor.CdmTypePropertyEditor;\r
 import eu.etaxonomy.cdm.remote.editor.UUIDPropertyEditor;\r
@@ -132,6 +136,35 @@ public abstract class BaseListController <T extends CdmBase, SERVICE extends ISe
         return service.list(type, limit, start, null, getInitializationStrategy());\r
     }\r
 \r
+    // this is a copy from BaseController, should be unified\r
+    protected TaxonNode getSubtreeOrError(UUID subtreeUuid, ITaxonNodeService taxonNodeService, HttpServletResponse response) throws IOException {\r
+        TaxonNode subtree = null;\r
+        if (subtreeUuid != null){\r
+            subtree = taxonNodeService.find(subtreeUuid);\r
+            if(subtree == null) {\r
+                response.sendError(404 , "Taxon node for subtree not found: " + subtreeUuid );\r
+                //will not happen\r
+                return null;\r
+            }\r
+        }\r
+        return subtree;\r
+    }\r
+\r
+    // this is a copy from BaseController, should be unified\r
+    protected Classification getClassificationOrError(UUID classificationUuid,\r
+            IClassificationService classificationService, HttpServletResponse response) throws IOException {\r
+        Classification classification = null;\r
+        if (classificationUuid != null){\r
+            classification = classificationService.find(classificationUuid);\r
+            if(classification == null) {\r
+                response.sendError(404 , "Classification not found: " + classificationUuid );\r
+                //will not happen\r
+                return null;\r
+            }\r
+        }\r
+        return classification;\r
+    }\r
+\r
   /* TODO\r
    @RequestMapping(method = RequestMethod.POST)\r
   public T doPost(@ModelAttribute("object") T object, BindingResult result) {\r
index a91c901a75a6dbfac041143b8da68d259ed4ffa0..bb8b6a6f5f17141cf5c9257b22b69b5a104e892a 100644 (file)
@@ -76,6 +76,9 @@ public class ClassificationController extends AbstractIdentifiableController<Cla
     public void setTaxonNodeService(ITaxonNodeService taxonNodeService) {
         this.taxonNodeService = taxonNodeService;
     }
+    protected ITaxonNodeService getTaxonNodeService() {
+        return this.taxonNodeService;
+    }
 
 
     @InitBinder
@@ -103,11 +106,12 @@ public class ClassificationController extends AbstractIdentifiableController<Cla
             method = RequestMethod.GET)
     public List<TaxonNode> getChildNodes(
             @PathVariable("uuid") UUID classificationUuid,
+            @RequestParam(value = "subtree", required = false) UUID subtreeUuid,
             HttpServletRequest request,
             HttpServletResponse response
             ) throws IOException {
 
-        return getChildNodesAtRank(classificationUuid, null, request, response);
+        return getChildNodesAtRank(classificationUuid, null, subtreeUuid, request, response);
     }
 
     @RequestMapping(
@@ -116,6 +120,7 @@ public class ClassificationController extends AbstractIdentifiableController<Cla
     public List<TaxonNode> getChildNodesAtRank(
             @PathVariable("uuid") UUID classificationUuid,
             @PathVariable("rankUuid") UUID rankUuid,
+            @RequestParam(value = "subtree", required = false) UUID subtreeUuid,
             HttpServletRequest request,
             HttpServletResponse response
             ) throws IOException {
@@ -128,16 +133,21 @@ public class ClassificationController extends AbstractIdentifiableController<Cla
             response.sendError(404 , "Classification not found using " + classificationUuid );
             return null;
         }
+
+        TaxonNode subtree = getSubtreeOrError(subtreeUuid, taxonNodeService, response);
+
         Rank rank = findRank(rankUuid);
 
         boolean includeUnpublished = NO_UNPUBLISHED;
 //        long start = System.currentTimeMillis();
-        List<TaxonNode> rootNodes = service.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, NODE_INIT_STRATEGY());
+        List<TaxonNode> rootNodes = service.listRankSpecificRootNodes(classification, subtree, rank,
+                includeUnpublished, null, null, NODE_INIT_STRATEGY());
 //        System.err.println("service.listRankSpecificRootNodes() " + (System.currentTimeMillis() - start));
 
         return rootNodes;
     }
 
+
     /**
     *
     * @param uuid
index 53c6e81963f0923753d8053feb4b3f10350fae12..62e30d45884f4bd199ead8351b8ececd6e0d7670 100644 (file)
@@ -24,10 +24,13 @@ import org.springframework.web.bind.annotation.InitBinder;
 import org.springframework.web.bind.annotation.PathVariable;\r
 import org.springframework.web.bind.annotation.RequestMapping;\r
 import org.springframework.web.bind.annotation.RequestMethod;\r
+import org.springframework.web.bind.annotation.RequestParam;\r
 \r
 import eu.etaxonomy.cdm.api.service.IClassificationService;\r
+import eu.etaxonomy.cdm.api.service.ITaxonNodeService;\r
 import eu.etaxonomy.cdm.api.service.ITaxonService;\r
 import eu.etaxonomy.cdm.api.service.ITermService;\r
+import eu.etaxonomy.cdm.exception.FilterException;\r
 import eu.etaxonomy.cdm.exception.UnpublishedException;\r
 import eu.etaxonomy.cdm.model.common.DefinedTermBase;\r
 import eu.etaxonomy.cdm.model.name.Rank;\r
@@ -62,6 +65,7 @@ public class ClassificationPortalListController extends AbstractIdentifiableList
     public static final Logger logger = Logger.getLogger(ClassificationPortalListController.class);\r
 \r
     private ITaxonService taxonService;\r
+    private ITaxonNodeService taxonNodeService;\r
 \r
     private ITermService termService;\r
 \r
@@ -85,6 +89,11 @@ public class ClassificationPortalListController extends AbstractIdentifiableList
         this.taxonService = taxonService;\r
     }\r
 \r
+    @Autowired\r
+    public void setTaxonNodeService(ITaxonNodeService taxonNodeService) {\r
+        this.taxonNodeService = taxonNodeService;\r
+    }\r
+\r
 \r
     @InitBinder\r
     @Override\r
@@ -107,11 +116,12 @@ public class ClassificationPortalListController extends AbstractIdentifiableList
             method = RequestMethod.GET)\r
     public List<TaxonNode> getChildNodes(\r
             @PathVariable("treeUuid") UUID treeUuid,\r
+            @RequestParam(value = "subtree", required = false) UUID subtreeUuid,\r
             HttpServletRequest request,\r
             HttpServletResponse response\r
             ) throws IOException {\r
 \r
-        return getChildNodesAtRank(treeUuid, null, request, response);\r
+        return getChildNodesAtRank(treeUuid, null, subtreeUuid, request, response);\r
     }\r
 \r
 \r
@@ -132,25 +142,27 @@ public class ClassificationPortalListController extends AbstractIdentifiableList
     public List<TaxonNode> getChildNodesAtRank(\r
             @PathVariable("treeUuid") UUID treeUuid,\r
             @PathVariable("rankUuid") UUID rankUuid,\r
+            @RequestParam(value = "subtree", required = false) UUID subtreeUuid,\r
             HttpServletRequest request,\r
             HttpServletResponse response\r
             ) throws IOException {\r
 \r
         logger.info("getChildNodesAtRank() " + request.getRequestURI());\r
-        Classification tree = null;\r
+        Classification classification = null;\r
         Rank rank = null;\r
         if(treeUuid != null){\r
-            tree = service.find(treeUuid);\r
-            if(tree == null) {\r
+            classification = service.find(treeUuid);\r
+            if(classification == null) {\r
                 HttpStatusMessage.UUID_NOT_FOUND.send(response, "Classification not found using " + treeUuid);\r
                 return null;\r
             }\r
         }\r
+        TaxonNode subtree = getSubtreeOrError(subtreeUuid, taxonNodeService, response);\r
 \r
         rank = findRank(rankUuid);\r
         boolean includeUnpublished = NO_UNPUBLISHED;\r
 //        long start = System.currentTimeMillis();\r
-        List<TaxonNode> rootNodes = service.listRankSpecificRootNodes(tree, rank, includeUnpublished, null, null, NODE_INIT_STRATEGY);\r
+        List<TaxonNode> rootNodes = service.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, NODE_INIT_STRATEGY);\r
 //        System.err.println("service.listRankSpecificRootNodes() " + (System.currentTimeMillis() - start));\r
         return rootNodes;\r
     }\r
@@ -179,14 +191,21 @@ public class ClassificationPortalListController extends AbstractIdentifiableList
     public List<TaxonNode> getChildNodesOfTaxon(\r
             @PathVariable("treeUuid") UUID treeUuid,\r
             @PathVariable("taxonUuid") UUID taxonUuid,\r
+            @RequestParam(value = "subtree", required = false) UUID subtreeUuid,\r
             HttpServletRequest request,\r
             HttpServletResponse response) throws IOException {\r
         logger.info("getChildNodesOfTaxon() " + request.getRequestURI());\r
 \r
         boolean includeUnpublished = NO_UNPUBLISHED;  //for now we do not allow any remote service to publish unpublished data\r
 \r
-        List<TaxonNode> children = service.listChildNodesOfTaxon(taxonUuid, treeUuid,\r
-                includeUnpublished, null, null, NODE_INIT_STRATEGY);\r
+        List<TaxonNode> children;\r
+        try {\r
+            children = service.listChildNodesOfTaxon(taxonUuid, treeUuid, subtreeUuid,\r
+                    includeUnpublished, null, null, NODE_INIT_STRATEGY);\r
+        } catch (FilterException e) {\r
+            HttpStatusMessage.SUBTREE_FILTER_INVALID.send(response);\r
+            return null;\r
+        }\r
         return children;\r
 \r
     }\r
@@ -233,18 +252,21 @@ public class ClassificationPortalListController extends AbstractIdentifiableList
             @PathVariable("treeUuid") UUID treeUuid,\r
             @PathVariable("taxonUuid") UUID taxonUuid,\r
             @PathVariable("rankUuid") UUID rankUuid,\r
+            @RequestParam(value = "subtree", required = false) UUID subtreeUuid,\r
             HttpServletRequest request,\r
             HttpServletResponse response) throws IOException {\r
         logger.info("getPathFromTaxonToRank() " + request.getRequestURI());\r
 \r
         boolean includeUnpublished = NO_UNPUBLISHED;\r
 \r
-        Classification tree = service.find(treeUuid);\r
+        Classification classification = service.find(treeUuid);\r
+        TaxonNode subtree = getSubtreeOrError(subtreeUuid, taxonNodeService, response);\r
         Rank rank = findRank(rankUuid);\r
         Taxon taxon = (Taxon) taxonService.load(taxonUuid);\r
 \r
         try {\r
-            return service.loadTreeBranchToTaxon(taxon, tree, rank, includeUnpublished, NODE_INIT_STRATEGY);\r
+            List<TaxonNode> result = service.loadTreeBranchToTaxon(taxon, classification, subtree, rank, includeUnpublished, NODE_INIT_STRATEGY);\r
+            return result;\r
         } catch (UnpublishedException e) {\r
             HttpStatusMessage.ACCESS_DENIED.send(response);\r
             return null;\r
@@ -272,12 +294,13 @@ public class ClassificationPortalListController extends AbstractIdentifiableList
             value = {"{treeUuid}/pathFrom/{taxonUuid}"},\r
             method = RequestMethod.GET)\r
     public List<TaxonNode> getPathFromTaxon(\r
-            @PathVariable("treeUuid") UUID treeUuid,\r
+            @PathVariable("treeUuid") UUID classificationUuid,\r
             @PathVariable("taxonUuid") UUID taxonUuid,\r
+            @RequestParam(value = "subtree", required = false) UUID subtreeUuid,\r
             HttpServletRequest request,\r
             HttpServletResponse response) throws IOException {\r
 \r
-        return getPathFromTaxonToRank(treeUuid, taxonUuid, null, request, response);\r
+        return getPathFromTaxonToRank(classificationUuid, taxonUuid, null, subtreeUuid, request, response);\r
     }\r
 \r
 \r
index e767731aa2634ce9a8e4a95f26fdada0e56c8a9d..db0c413df6d63b997e60aacda02d469cc14a3fac 100644 (file)
@@ -8,8 +8,6 @@
 
 package eu.etaxonomy.cdm.remote.controller;
 
-import io.swagger.annotations.Api;
-
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.List;
@@ -48,6 +46,7 @@ import eu.etaxonomy.cdm.remote.editor.TermBasePropertyEditor;
 import eu.etaxonomy.cdm.remote.editor.UUIDListPropertyEditor;
 import eu.etaxonomy.cdm.remote.editor.UUIDPropertyEditor;
 import eu.etaxonomy.cdm.remote.editor.UuidList;
+import io.swagger.annotations.Api;
 
 /**
  * TODO write controller documentation
@@ -207,7 +206,7 @@ public class DescriptionElementListController {
     * @throws IOException
     */
    @RequestMapping(value = "find", method = RequestMethod.GET) // mapped as absolute path, see CdmAntPathMatcher
-   public Pager<DescriptionElementBase> doFindDescriptionElements(
+   public Pager<? extends DescriptionElementBase> doFindDescriptionElements(
            @RequestParam(value = "query", required = true) String queryString,
            @RequestParam(value = "type", required = false) Class<? extends DescriptionElementBase> type,
            @RequestParam(value = "pageSize", required = false) Integer pageSize,
@@ -223,7 +222,7 @@ public class DescriptionElementListController {
        PagerParameters pagerParams = new PagerParameters(pageSize, pageNumber);
        pagerParams.normalizeAndValidate(response);
 
-       Pager<DescriptionElementBase> pager = service.searchElements(type, queryString, pageSize, pageNumber, null, getInitializationStrategy());
+       Pager<? extends DescriptionElementBase> pager = service.searchElements(type, queryString, pageSize, pageNumber, null, getInitializationStrategy());
 
        return pager;
    }
index e028a4d0a1a94fc2e6acfd37cb4ac559b1c88f5d..96f2ea901e7c8290389d40a819638782106ea002 100644 (file)
@@ -35,6 +35,7 @@ public class HttpStatusMessage {
     public final static HttpStatusMessage INTERNAL_ERROR = new HttpStatusMessage(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "internal server error");\r
 \r
     public final static HttpStatusMessage ACCESS_DENIED = new HttpStatusMessage(HttpServletResponse.SC_FORBIDDEN, "access denied");\r
+    public final static HttpStatusMessage SUBTREE_FILTER_INVALID = new HttpStatusMessage(HttpServletResponse.SC_NOT_FOUND, "invalid uuid for subtree filter");\r
 \r
 \r
     private int statusCode;\r
index 8e896fe09e217ecaad37d7b433d05323dc45f74a..d3f4ccfae7be674d2d50ff16a75dc01c27e2954f 100644 (file)
@@ -110,7 +110,7 @@ public class OccurrenceController extends AbstractIdentifiableController<Specime
         FieldUnitDTO fieldUnitDto = null;\r
         if(sob instanceof DerivedUnit){\r
 \r
-            fieldUnitDto = service.findFieldUnitDTO(PreservedSpecimenDTO.newInstance(sob) , new ArrayList<FieldUnitDTO>(), new HashMap<UUID, DerivateDTO>());\r
+            fieldUnitDto = service.findFieldUnitDTO(new PreservedSpecimenDTO(sob) , new ArrayList<FieldUnitDTO>(), new HashMap<UUID, DerivateDTO>());\r
 \r
         }\r
 \r
index 91acb18fe63e6ba546e2d413e1132ec8c9774ac1..07ab8afb2db2b796024bd138caab0b7bc4b44fbd 100644 (file)
@@ -72,6 +72,8 @@ public class RegistrationController extends BaseController<Registration, IRegist
             HttpServletRequest request,
             HttpServletResponse response) throws IOException {
 
+        logger.info("doGet() " + requestPathAndQuery(request));
+
         Registration reg = super.doGet(uuid, request, response);
         if(reg != null){
             if(userHelper.userIsAutheticated() && userHelper.userIsAnnonymous() && !reg.getStatus().equals(RegistrationStatus.PUBLISHED)) {
index 68f1510fbb07f849905e6d48ec96caa870b771dd..59496592657aa8850fe412b23168450bd91659aa 100644 (file)
@@ -44,6 +44,7 @@ import eu.etaxonomy.cdm.api.service.dto.IncludedTaxaDTO;
 import eu.etaxonomy.cdm.api.service.dto.TaxonRelationshipsDTO;\r
 import eu.etaxonomy.cdm.api.service.pager.Pager;\r
 import eu.etaxonomy.cdm.exception.UnpublishedException;\r
+import eu.etaxonomy.cdm.model.common.CdmBase;\r
 import eu.etaxonomy.cdm.model.common.DefinedTermBase;\r
 import eu.etaxonomy.cdm.model.common.MarkerType;\r
 import eu.etaxonomy.cdm.model.common.RelationshipBase.Direction;\r
@@ -51,6 +52,7 @@ import eu.etaxonomy.cdm.model.description.DescriptionElementBase;
 import eu.etaxonomy.cdm.model.description.TaxonDescription;\r
 import eu.etaxonomy.cdm.model.occurrence.SpecimenOrObservationBase;\r
 import eu.etaxonomy.cdm.model.taxon.Classification;\r
+import eu.etaxonomy.cdm.model.taxon.Synonym;\r
 import eu.etaxonomy.cdm.model.taxon.Taxon;\r
 import eu.etaxonomy.cdm.model.taxon.TaxonBase;\r
 import eu.etaxonomy.cdm.model.taxon.TaxonNode;\r
@@ -94,7 +96,7 @@ public class TaxonController extends AbstractIdentifiableController<TaxonBase, I
     private ITermService termService;\r
 \r
     protected static final List<String> TAXONNODE_INIT_STRATEGY = Arrays.asList(new String []{\r
-            "taxonNodes"\r
+            "taxonNodes.classification"\r
     });\r
 \r
     public TaxonController(){\r
@@ -129,6 +131,53 @@ public class TaxonController extends AbstractIdentifiableController<TaxonBase, I
         return getInitializationStrategy();\r
     }\r
 \r
+    @RequestMapping(params="subtree", method = RequestMethod.GET)\r
+    public TaxonBase<?> doGet(@PathVariable("uuid") UUID uuid,\r
+            @RequestParam(value = "subtree", required = true) UUID subtreeUuid,  //if subtree does not exist the base class method is used, therefore required\r
+            HttpServletRequest request,\r
+            HttpServletResponse response) throws IOException {\r
+        if(request != null) {\r
+            logger.info("doGet() " + requestPathAndQuery(request));\r
+        }\r
+        //TODO do we want to allow Synonyms at all? Maybe needs initialization\r
+        TaxonBase<?> taxonBase = getCdmBaseInstance(uuid, response, TAXONNODE_INIT_STRATEGY);\r
+        //TODO we should move subtree check down to service or persistence\r
+        TaxonNode subtree = getSubtreeOrError(subtreeUuid, nodeService, response);\r
+        taxonBase = checkExistsSubtreeAndAccess(taxonBase, subtree, NO_UNPUBLISHED, response);\r
+        return taxonBase;\r
+    }\r
+\r
+    /**\r
+     * Checks if a {@link TaxonBase taxonBase} is public and belongs to a {@link TaxonNode subtree}\r
+     * as accepted taxon or synonym.\r
+     * If not the according {@link HttpStatusMessage http messages} are added to response.\r
+     * <BR>\r
+     * Not (yet) checked is the relation to a subtree via a concept relationship.\r
+     * @param taxonBase\r
+     * @param includeUnpublished\r
+     * @param response\r
+     * @return\r
+     * @throws IOException\r
+     */\r
+    protected <S extends TaxonBase<?>> S checkExistsSubtreeAndAccess(S taxonBase, TaxonNode subtree, boolean includeUnpublished,\r
+            HttpServletResponse response) throws IOException {\r
+        taxonBase = checkExistsAndAccess(taxonBase, NO_UNPUBLISHED, response);\r
+        if (taxonBase != null){\r
+            //TODO synonyms maybe can not be initialized\r
+            Taxon taxon = taxonBase.isInstanceOf(Synonym.class)?\r
+                    CdmBase.deproxy(taxonBase, Synonym.class).getAcceptedTaxon():\r
+                    CdmBase.deproxy(taxonBase, Taxon.class);\r
+            //check if taxon has any node that is a descendant of subtree\r
+            for (TaxonNode taxonNode :taxon.getTaxonNodes()){\r
+                if (subtree.isAncestor(taxonNode)){\r
+                    return taxonBase;\r
+                }\r
+            }\r
+            HttpStatusMessage.ACCESS_DENIED.send(response);\r
+        }\r
+        return null;\r
+    }\r
+\r
 \r
     /**\r
      * Get the accepted {@link Taxon} for a given\r
@@ -187,12 +236,19 @@ public class TaxonController extends AbstractIdentifiableController<TaxonBase, I
     @RequestMapping(value = "taxonNodes", method = RequestMethod.GET)\r
     public Set<TaxonNode>  doGetTaxonNodes(\r
             @PathVariable("uuid") UUID uuid,\r
+            @RequestParam(value = "subtree", required = false) UUID subtreeUuid,\r
             HttpServletRequest request,\r
             HttpServletResponse response) throws IOException {\r
 \r
-        TaxonBase<?> tb = service.load(uuid, NO_UNPUBLISHED, TAXONNODE_INIT_STRATEGY);\r
-        if(tb instanceof Taxon){\r
-            return ((Taxon)tb).getTaxonNodes();\r
+        logger.info("doGetTaxonNodes" + requestPathAndQuery(request));\r
+        TaxonBase<?> taxonBase;\r
+        if (subtreeUuid != null){\r
+            taxonBase = doGet(uuid, subtreeUuid, request, response);\r
+        }else{\r
+            taxonBase = service.load(uuid, NO_UNPUBLISHED, TAXONNODE_INIT_STRATEGY);\r
+        }\r
+        if(taxonBase instanceof Taxon){\r
+            return ((Taxon)taxonBase).getTaxonNodes();\r
         } else {\r
             HttpStatusMessage.UUID_REFERENCES_WRONG_TYPE.send(response);\r
             return null;\r
@@ -470,7 +526,7 @@ public class TaxonController extends AbstractIdentifiableController<TaxonBase, I
             @RequestParam(value = "directTypes", required = false) UuidList directTypeUuids,\r
             @RequestParam(value = "inversTypes", required = false) UuidList inversTypeUuids,\r
             @RequestParam(value = "direction", required = false) Direction direction,\r
-            @RequestParam(value="groupMisapplications", required=false, defaultValue="true") final boolean groupMisapplications,\r
+            @RequestParam(value="groupMisapplications", required=false, defaultValue="false") final boolean groupMisapplications,\r
             HttpServletRequest request,\r
             HttpServletResponse response) throws IOException {\r
 \r
index 318510aa3dba6611dac90aac42d09a58c339684f..aa6c19b81dfaeacdffca888da158909d94ea90c5 100644 (file)
@@ -83,15 +83,11 @@ import io.swagger.annotations.Api;
 @RequestMapping(value = {"/taxon"})
 public class TaxonListController extends AbstractIdentifiableListController<TaxonBase, ITaxonService> {
 
-
     private static final List<String> SIMPLE_TAXON_INIT_STRATEGY = DEFAULT_INIT_STRATEGY;
     protected List<String> getSimpleTaxonInitStrategy() {
         return SIMPLE_TAXON_INIT_STRATEGY;
     }
 
-    /**
-     *
-     */
     public TaxonListController(){
         super();
         setInitializationStrategy(Arrays.asList(new String[]{"$","name.nomenclaturalReference"}));
@@ -109,7 +105,6 @@ public class TaxonListController extends AbstractIdentifiableListController<Taxo
     @Autowired
     private ITaxonNodeService taxonNodeService;
 
-
     @Autowired
     private ITermService termService;
 
@@ -121,7 +116,6 @@ public class TaxonListController extends AbstractIdentifiableListController<Taxo
         binder.registerCustomEditor(MatchMode.class, new MatchModePropertyEditor());
         binder.registerCustomEditor(Rank.class, new RankPropertyEditor());
         binder.registerCustomEditor(PresenceAbsenceTerm.class, new TermBasePropertyEditor<PresenceAbsenceTerm>(termService));
-
     }
 
     /**
@@ -162,7 +156,8 @@ public class TaxonListController extends AbstractIdentifiableListController<Taxo
     @RequestMapping(method = RequestMethod.GET, value={"search"})
     public Pager<SearchResult<TaxonBase>> doSearch(
             @RequestParam(value = "query", required = true) String query,
-            @RequestParam(value = "classificationUuid", required = false) UUID classificationUuid,
+            @RequestParam(value = "tree", required = false) UUID classificationUuid,
+            @RequestParam(value = "subtree", required = false) UUID subtreeUuid,
             @RequestParam(value = "area", required = false) DefinedTermBaseList<NamedArea> areaList,
             @RequestParam(value = "status", required = false) PresenceAbsenceTerm[] status,
             @RequestParam(value = "pageNumber", required = false) Integer pageNumber,
@@ -208,7 +203,8 @@ public class TaxonListController extends AbstractIdentifiableListController<Taxo
             searchModes.add(TaxaAndNamesSearchMode.includeUnpublished);
         }
 
-        Classification classification = classificationService.load(classificationUuid);
+        Classification classification = getClassificationOrError(classificationUuid, classificationService, response);
+        TaxonNode subtree = getSubtreeOrError(subtreeUuid, taxonNodeService, response);
 
         Set<PresenceAbsenceTerm> statusSet = null;
         if(status != null) {
@@ -216,7 +212,7 @@ public class TaxonListController extends AbstractIdentifiableListController<Taxo
         }
 
         return service.findTaxaAndNamesByFullText(searchModes, query,
-                classification, areaSet, statusSet, null,
+                classification, subtree, areaSet, statusSet, null,
                 false, pagerParams.getPageSize(), pagerParams.getPageIndex(),
                 OrderHint.NOMENCLATURAL_SORT_ORDER.asList(), getSimpleTaxonInitStrategy());
     }
@@ -259,7 +255,8 @@ public class TaxonListController extends AbstractIdentifiableListController<Taxo
     @RequestMapping(method = RequestMethod.GET, value={"find"})
     public Pager<IdentifiableEntity> doFind(
             @RequestParam(value = "query", required = true) String query,
-            @RequestParam(value = "tree", required = false) UUID treeUuid,
+            @RequestParam(value = "tree", required = false) UUID classificationUuid,
+            @RequestParam(value = "subtree", required = false) UUID subtreeUuid,
             @RequestParam(value = "area", required = false) Set<NamedArea> areas,
             @RequestParam(value = "pageNumber", required = false) Integer pageNumber,
             @RequestParam(value = "pageSize", required = false) Integer pageSize,
@@ -297,11 +294,14 @@ public class TaxonListController extends AbstractIdentifiableListController<Taxo
         config.setNamedAreas(areas);
         config.setDoIncludeAuthors(includeAuthors != null ? includeAuthors : Boolean.FALSE);
         config.setOrder(order);
-        if(treeUuid != null){
-            Classification classification = classificationService.find(treeUuid);
+        if(classificationUuid != null){
+            Classification classification = classificationService.find(classificationUuid);
             config.setClassification(classification);
         }
 
+        TaxonNode subtree = getSubtreeOrError(subtreeUuid, taxonNodeService, response);
+        config.setSubtree(subtree);
+
         return service.findTaxaAndNames(config);
 
     }
@@ -325,6 +325,7 @@ public class TaxonListController extends AbstractIdentifiableListController<Taxo
             @RequestParam(value = "clazz", required = false) Class<? extends DescriptionElementBase> clazz,
             @RequestParam(value = "query", required = true) String queryString,
             @RequestParam(value = "tree", required = false) UUID treeUuid,
+            @RequestParam(value = "subtree", required = false) UUID subtreeUuid,
             @RequestParam(value = "features", required = false) UuidList featureUuids,
             @RequestParam(value = "languages", required = false) List<Language> languages,
             @RequestParam(value = "hl", required = false) Boolean highlighting,
@@ -345,30 +346,32 @@ public class TaxonListController extends AbstractIdentifiableListController<Taxo
          }
 
          Classification classification = null;
-        if(treeUuid != null){
+         if(treeUuid != null){
             classification = classificationService.find(treeUuid);
-        }
+         }
+         TaxonNode subtree = getSubtreeOrError(subtreeUuid, taxonNodeService, response);
 
-        List<Feature> features = null;
-        if(featureUuids != null){
+         List<Feature> features = null;
+         if(featureUuids != null){
             features = new ArrayList<>(featureUuids.size());
             for(UUID uuid : featureUuids){
                 features.add((Feature) termService.find(uuid));
             }
-        }
+         }
 
-        Pager<SearchResult<TaxonBase>> pager = service.findByDescriptionElementFullText(
-                clazz, queryString, classification, features, languages, highlighting,
+         Pager<SearchResult<TaxonBase>> pager = service.findByDescriptionElementFullText(
+                clazz, queryString, classification, subtree, features, languages, highlighting,
                 pagerParams.getPageSize(), pagerParams.getPageIndex(),
                 ((List<OrderHint>)null), getSimpleTaxonInitStrategy());
-        return pager;
+         return pager;
     }
 
     @RequestMapping(method = RequestMethod.GET, value={"findByFullText"})
     public Pager<SearchResult<TaxonBase>> doFindByFullText(
             @RequestParam(value = "clazz", required = false) Class<? extends TaxonBase> clazz,
             @RequestParam(value = "query", required = true) String queryString,
-            @RequestParam(value = "tree", required = false) UUID treeUuid,
+            @RequestParam(value = "tree", required = false) UUID classificationUuid,
+            @RequestParam(value = "subtree", required = false) UUID subtreeUuid,
             @RequestParam(value = "languages", required = false) List<Language> languages,
             @RequestParam(value = "hl", required = false) Boolean highlighting,
             @RequestParam(value = "pageNumber", required = false) Integer pageNumber,
@@ -390,11 +393,13 @@ public class TaxonListController extends AbstractIdentifiableListController<Taxo
         }
 
         Classification classification = null;
-        if(treeUuid != null){
-            classification = classificationService.find(treeUuid);
+        if(classificationUuid != null){
+            classification = classificationService.find(classificationUuid);
         }
+        TaxonNode subtree = getSubtreeOrError(subtreeUuid, taxonNodeService, response);
 
-        Pager<SearchResult<TaxonBase>> pager = service.findByFullText(clazz, queryString, classification, includeUnpublished,
+        Pager<SearchResult<TaxonBase>> pager = service.findByFullText(clazz, queryString, classification, subtree,
+                includeUnpublished,
                 languages, highlighting, pagerParams.getPageSize(), pagerParams.getPageIndex(), ((List<OrderHint>) null),
                 initializationStrategy);
         return pager;
@@ -409,6 +414,7 @@ public class TaxonListController extends AbstractIdentifiableListController<Taxo
             @RequestParam(value = "clazz", required = false) Class<? extends TaxonBase> clazz,
             @RequestParam(value = "query", required = true) String queryString,
             @RequestParam(value = "tree", required = false) UUID treeUuid,
+            @RequestParam(value = "subtree", required = false) UUID subtreeUuid,
             @RequestParam(value = "languages", required = false) List<Language> languages,
             @RequestParam(value = "hl", required = false) Boolean highlighting,
             @RequestParam(value = "pageNumber", required = false) Integer pageNumber,
@@ -430,15 +436,16 @@ public class TaxonListController extends AbstractIdentifiableListController<Taxo
          }
 
          Classification classification = null;
-        if(treeUuid != null){
+         if(treeUuid != null){
             classification = classificationService.find(treeUuid);
-        }
+         }
+         TaxonNode subtree = getSubtreeOrError(subtreeUuid, taxonNodeService, response);
 
-        Pager<SearchResult<TaxonBase>> pager = service.findByEverythingFullText(
-                queryString, classification, includeUnpublished, languages, highlighting,
+         Pager<SearchResult<TaxonBase>> pager = service.findByEverythingFullText(
+                queryString, classification, subtree, includeUnpublished, languages, highlighting,
                 pagerParams.getPageSize(), pagerParams.getPageIndex(),
                 ((List<OrderHint>)null), initializationStrategy);
-        return pager;
+         return pager;
     }
 
     /**
index 3dafa575150a5b0a926a84397439d30bbf7f10db..702104a91895793d9919005f72f8d9ad90dfafad 100644 (file)
@@ -12,8 +12,10 @@ package eu.etaxonomy.cdm.remote.controller;
 import java.io.IOException;
 import java.util.UUID;
 
+import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
+import org.apache.log4j.Logger;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Controller;
 import org.springframework.web.bind.annotation.PathVariable;
@@ -40,6 +42,8 @@ import io.swagger.annotations.Api;
 @RequestMapping(value = {"/taxonNode/{uuid}"})
 public class TaxonNodeController extends AbstractController<TaxonNode, ITaxonNodeService> {
 
+    public static final Logger logger = Logger.getLogger(TaxonNodeController.class);
+
     @Override
     @Autowired
     public void setService(ITaxonNodeService service) {
@@ -59,12 +63,26 @@ public class TaxonNodeController extends AbstractController<TaxonNode, ITaxonNod
             method = RequestMethod.GET)
     public TaxonNodeDto doGetParent(
             @PathVariable("uuid") UUID uuid,
+            HttpServletRequest request,
             HttpServletResponse response
             ) throws IOException {
 
+        logger.info("doGetParent() " + requestPathAndQuery(request));
         return service.parentDto(uuid);
     }
 
+    @RequestMapping(
+            method = RequestMethod.GET)
+    public TaxonNodeDto doGet(
+            @PathVariable("uuid") UUID uuid,
+            HttpServletRequest request,
+            HttpServletResponse response
+            ) throws IOException {
+
+        logger.info("doGet() " + requestPathAndQuery(request));
+        return service.dto(uuid);
+    }
+
     /**
      *
      * @param uuid
index 03b5b53a459bdbafcd7cc4e56ccba865a019bdbd..8a39ccd041fe0ead0623883ac0c336e4b11f851b 100644 (file)
@@ -38,7 +38,6 @@ import eu.etaxonomy.cdm.api.service.ITaxonNodeService;
 import eu.etaxonomy.cdm.api.service.ITaxonService;
 import eu.etaxonomy.cdm.api.service.ITermService;
 import eu.etaxonomy.cdm.api.service.util.TaxonRelationshipEdge;
-import eu.etaxonomy.cdm.database.UpdatableRoutingDataSource;
 import eu.etaxonomy.cdm.model.common.RelationshipBase.Direction;
 import eu.etaxonomy.cdm.model.location.NamedArea;
 import eu.etaxonomy.cdm.model.media.Media;
@@ -227,12 +226,6 @@ public class TaxonPortalController extends TaxonController{
             "childNodes.taxon",
     });
 
-    protected static final List<String> TAXONNODE_INIT_STRATEGY = Arrays.asList(new String []{
-            "taxonNodes.classification"
-    });
-
-
-
     private static final String featureTreeUuidPattern = "^/taxon(?:(?:/)([^/?#&\\.]+))+.*";
 
 
@@ -454,22 +447,6 @@ public class TaxonPortalController extends TaxonController{
         return list;
     }
 
-    @Override
-    @RequestMapping(value = "taxonNodes", method = RequestMethod.GET)
-    public Set<TaxonNode>  doGetTaxonNodes(
-            @PathVariable("uuid") UUID uuid,
-            HttpServletRequest request,
-            HttpServletResponse response) throws IOException {
-
-        logger.info("doGetTaxonNodes" + requestPathAndQuery(request));
-        TaxonBase<?> taxon = service.load(uuid, NO_UNPUBLISHED, TAXONNODE_INIT_STRATEGY);
-        if(taxon instanceof Taxon){
-            return ((Taxon)taxon).getTaxonNodes();
-        } else {
-            HttpStatusMessage.UUID_REFERENCES_WRONG_TYPE.send(response);
-            return null;
-        }
-    }
 
 //     @RequestMapping(value = "specimens", method = RequestMethod.GET)
 //     public ModelAndView doGetSpecimens(
index e1435b34791174de7ff23d52cbd889d25b39f489..2ae3183fc9fe506e435d6d347695fb4b00beabb7 100644 (file)
@@ -27,7 +27,7 @@ log4j.logger.eu.etaxonomy.cdm = INFO
 #        avoid vast amount of "getCitation not yet implemented" & "getProblems not yet implemented" messages
 log4j.logger.eu.etaxonomy.cdm.model.name.TaxonName = ERROR
 log4j.logger.eu.etaxonomy.cdm.database.UpdatableRoutingDataSource = INFO
-log4j.logger.eu.etaxonomy.cdm.persistence.dao.initializer.AbstractBeanInitializer = ERROR
+log4j.logger.eu.etaxonomy.cdm.persistence.dao.initializer = ERROR
 log4j.logger.eu.etaxonomy.cdm.remote.controller = INFO
 #log4j.logger.eu.etaxonomy.cdm.remote.json.processor.AbstractCdmBeanProcessor  = DEBUG
 log4j.logger.eu.etaxonomy.cdm.remote.controller.interceptor = WARN
index a80c9f5879d023561268fd704143d6f7dcf1e8ba..cf2e1ff00e380a5e80d7beb4a779d5613054d6f6 100644 (file)
@@ -15,7 +15,7 @@
     <!-- \r
         This applicationContext is not a full cdm application context. It only covers the remote compoents \r
         without service and persistence. The cacheManager is usually provided via a Java config class in\r
-        eu/etaxonomy/cdm/service/api/conf which cannot  use in this conetext. Therefore we are \r
+        eu/etaxonomy/cdm/service/api/conf which cannot be used in this context. Therefore we are \r
         using the EhCacheManagerFactoryBean here to initialize the cacheManager bean.\r
      -->\r
     <bean id="cacheManager" class="org.springframework.cache.ehcache.EhCacheManagerFactoryBean">\r
index 238e97802743ebbd6c805d6419855bcf134ae234..8956235a6ed9e0055a8ecddee7ebe15587491c3e 100644 (file)
@@ -27,7 +27,7 @@ log4j.logger.eu.etaxonomy.cdm = INFO
 #        avoid vast amount of "getCitation not yet implemented" & "getProblems not yet implemented" messages\r
 log4j.logger.eu.etaxonomy.cdm.model.name.TaxonName = ERROR\r
 log4j.logger.eu.etaxonomy.cdm.database.UpdatableRoutingDataSource = INFO\r
-log4j.logger.eu.etaxonomy.cdm.persistence.dao.AbstractBeanInitializer = ERROR\r
+log4j.logger.eu.etaxonomy.cdm.persistence.dao.initializer = ERROR\r
 log4j.logger.eu.etaxonomy.cdm.remote.controller = INFO\r
 log4j.logger.eu.etaxonomy.cdm.remote.controller.interceptor = WARN\r
 #log4j.logger.eu.etaxonomy.cdm.remote.json.processor.AbstractCdmBeanProcessor  = DEBUG\r
index 60b92015dcf56fab2bbb75a036fa3f894db66029..cce8c6c87931bf6c7b87e570a2b5784e7d6a9b3f 100644 (file)
@@ -6,7 +6,7 @@
   <parent>
     <groupId>eu.etaxonomy</groupId>
     <artifactId>cdmlib-parent</artifactId>
-    <version>5.2.0</version>
+    <version>5.3.0</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 
index 9970e351547150def05d1799f1a23fe895b41039..9665f2b544f2c410e154c6b95ea0375eca5699ea 100644 (file)
@@ -19,6 +19,7 @@ import org.springframework.beans.BeansException;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.context.ApplicationContext;
 import org.springframework.context.ApplicationContextAware;
+import org.springframework.context.annotation.Lazy;
 import org.springframework.orm.hibernate5.HibernateTransactionManager;
 import org.springframework.security.authentication.ProviderManager;
 import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
@@ -145,6 +146,7 @@ public class CdmRepository implements ICdmRepository, ApplicationContextAware {
        //@Autowired
        private DataSource dataSource;
        @Autowired
+       @Lazy
        private ProviderManager authenticationManager;
        @Autowired
        private IUserService userService;
index cbf917962841bcc67183dd389010d0974bd33f7f..71593dd70c602733665967235854eaa6b94fa16b 100644 (file)
@@ -15,12 +15,10 @@ import net.sf.ehcache.config.CacheConfiguration.CacheEventListenerFactoryConfigu
 import net.sf.ehcache.store.MemoryStoreEvictionPolicy;
 
 /**
- * CDM Entity Cacher class based on EhCache.
+ * CDM Entity Cacher class based on EhCache using UUID as key.
  * The cacher allows null values to be cached.
  *
  * @author cmathew
- *
- * @param <T>
  */
 
 public abstract class CdmCacher implements ICdmUuidCacher {
@@ -30,7 +28,7 @@ public abstract class CdmCacher implements ICdmUuidCacher {
     @Autowired
     public CacheManager cacheManager;
 
-    public static final String DEFAULT_CACHE_NAME = "defaultCache"; //TODO compare with CacheConfiguration where the name for the default cache is 'default', Why another name here?
+    public static final String DEFAULT_CACHE_NAME = "cdmDefaultCache"; //TODO compare with CacheConfiguration where the name for the default cache is 'default', Why another name here?
 
     /**
      * Constructor which initialises a singleton {@link net.sf.ehcache.CacheManager}
@@ -103,6 +101,11 @@ public abstract class CdmCacher implements ICdmUuidCacher {
         return defaultCache;
     }
 
+    @Override
+    public void dispose(){
+        cacheManager.getCache(DEFAULT_CACHE_NAME).dispose();
+    }
+
     /**
      * Gets the cache element corresponding to the given {@link java.util.UUID}
      *
@@ -188,7 +191,6 @@ public abstract class CdmCacher implements ICdmUuidCacher {
     @Override
     public boolean existsAndIsNotNull(UUID uuid) {
         Element e = getCacheElement(uuid);
-        CdmBase cdmEntity;
         if (e != null) {
             return e.getObjectValue() != null;
         }
index a8e7dd0d4bdcaff76d8638316472e2b1c2c1e92a..40b6dbdbcf9ea50e148e7f0fcb95fdc67774ee5c 100644 (file)
@@ -13,8 +13,6 @@ import eu.etaxonomy.cdm.model.common.DefinedTermBase;
  * CDM Entity Cacher class which handles the caching of Defined Terms.
  *
  * @author cmathew
- *
- * @param <T>
  */
 @Component
 public class CdmTermCacher extends CdmCacher {
index 69acddf2ffa2ac16f6c41f3e6e9910fab82dbd5d..0ea9d7b9fa9e042c350695d01a3e71340da039a4 100644 (file)
@@ -64,7 +64,8 @@ public class EhCacheConfiguration implements DisposableBean {
 
 
     /**
-     * Returns the default cache configuration.
+     * Returns the default cache configuration for the cache
+     * named {@link CdmCacher#DEFAULT_CACHE_NAME "cdmDefaultCache"}
      *
      * @return
      */
@@ -77,6 +78,7 @@ public class EhCacheConfiguration implements DisposableBean {
 
         CacheConfiguration cc = new CacheConfiguration(CdmCacher.DEFAULT_CACHE_NAME, 500)
                 .memoryStoreEvictionPolicy(MemoryStoreEvictionPolicy.LFU)
+                .maxEntriesLocalHeap(10) // avoid ehcache consuming too much heap
                 .eternal(false)
                 // default ttl and tti set to 2 hours
                 .timeToLiveSeconds(60*60*2)
index 5dd36a30e59d0dfe474e0a10d7734a9265808d3b..726f0b11320917853fe245e5017a119ff93f8a6b 100644 (file)
@@ -43,6 +43,7 @@ import eu.etaxonomy.cdm.api.service.pager.PagerUtils;
 import eu.etaxonomy.cdm.api.service.pager.impl.AbstractPagerImpl;
 import eu.etaxonomy.cdm.api.service.pager.impl.DefaultPagerImpl;
 import eu.etaxonomy.cdm.common.monitor.IProgressMonitor;
+import eu.etaxonomy.cdm.exception.FilterException;
 import eu.etaxonomy.cdm.exception.UnpublishedException;
 import eu.etaxonomy.cdm.hibernate.HHH_9751_Util;
 import eu.etaxonomy.cdm.hibernate.HibernateProxyHelper;
@@ -179,19 +180,32 @@ public class ClassificationServiceImpl
        }
     }
 
+    @Override
+    public List<TaxonNode> listRankSpecificRootNodes(Classification classification, Rank rank,
+            boolean includeUnpublished, Integer pageSize, Integer pageIndex, List<String> propertyPaths) {
+        return listRankSpecificRootNodes(classification, null, rank, includeUnpublished, pageSize, pageIndex, propertyPaths);
+    }
+
     /**
      * {@inheritDoc}
      */
     @Override
-    public List<TaxonNode> listRankSpecificRootNodes(Classification classification, Rank rank,
+    public List<TaxonNode> listRankSpecificRootNodes(Classification classification,
+            TaxonNode subtree, Rank rank,
             boolean includeUnpublished, Integer pageSize, Integer pageIndex, List<String> propertyPaths) {
-        return pageRankSpecificRootNodes(classification, rank, includeUnpublished, pageSize, pageIndex, propertyPaths).getRecords();
+        return pageRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, pageSize, pageIndex, propertyPaths).getRecords();
     }
 
     @Override
     public Pager<TaxonNode> pageRankSpecificRootNodes(Classification classification, Rank rank,
             boolean includeUnpublished, Integer pageSize, Integer pageIndex, List<String> propertyPaths) {
-        long[] numberOfResults = dao.countRankSpecificRootNodes(classification, includeUnpublished, rank);
+        return pageRankSpecificRootNodes(classification, null, rank, includeUnpublished, pageSize, pageIndex, propertyPaths);
+    }
+
+    @Override
+    public Pager<TaxonNode> pageRankSpecificRootNodes(Classification classification, TaxonNode subtree, Rank rank,
+            boolean includeUnpublished, Integer pageSize, Integer pageIndex, List<String> propertyPaths) {
+        long[] numberOfResults = dao.countRankSpecificRootNodes(classification, subtree, includeUnpublished, rank);
         long totalNumberOfResults = numberOfResults[0] + (numberOfResults.length > 1 ? numberOfResults[1] : 0);
 
         List<TaxonNode> results = new ArrayList<>();
@@ -211,7 +225,8 @@ public class ClassificationServiceImpl
                 }
 
                 List<TaxonNode> perQueryResults = dao.listRankSpecificRootNodes(classification,
-                        rank, includeUnpublished, remainingLimit, start, propertyPaths, queryIndex);
+                        subtree, rank, includeUnpublished, remainingLimit,
+                        start, propertyPaths, queryIndex);
                 results.addAll(perQueryResults);
                 if(remainingLimit != null ){
                     remainingLimit = remainingLimit - results.size();
@@ -231,12 +246,17 @@ public class ClassificationServiceImpl
 
     }
 
+    @Override
+    public List<TaxonNode> loadTreeBranch(TaxonNode taxonNode, Rank baseRank,
+            boolean includeUnpublished, List<String> propertyPaths) throws UnpublishedException{
+        return loadTreeBranch(taxonNode, null, baseRank, includeUnpublished, propertyPaths);
+    }
 
     /**
      * {@inheritDoc}
      */
     @Override
-    public List<TaxonNode> loadTreeBranch(TaxonNode taxonNode, Rank baseRank,
+    public List<TaxonNode> loadTreeBranch(TaxonNode taxonNode, TaxonNode subtree, Rank baseRank,
             boolean includeUnpublished, List<String> propertyPaths) throws UnpublishedException{
 
         TaxonNode thisNode = taxonNodeDao.load(taxonNode.getUuid(), propertyPaths);
@@ -274,6 +294,9 @@ public class ClassificationServiceImpl
             if(baseRank != null && parentNodeRank != null && baseRank.isLower(parentNodeRank)){
                 break;
             }
+            if((subtree!= null && !subtree.isAncestor(parentNode) )){
+                break;
+            }
 
             pathToRoot.add(parentNode);
             thisNode = parentNode;
@@ -289,14 +312,26 @@ public class ClassificationServiceImpl
     @Override
     public List<TaxonNode> loadTreeBranchToTaxon(Taxon taxon, Classification classification, Rank baseRank,
             boolean includeUnpublished, List<String> propertyPaths) throws UnpublishedException{
+        return loadTreeBranchToTaxon(taxon, classification, null, baseRank, includeUnpublished, propertyPaths);
+    }
+
+    @Override
+    public List<TaxonNode> loadTreeBranchToTaxon(Taxon taxon, Classification classification,
+            TaxonNode subtree, Rank baseRank,
+            boolean includeUnpublished, List<String> propertyPaths) throws UnpublishedException{
 
         UUID nodeUuid = getTaxonNodeUuidByTaxonUuid(classification.getUuid(), taxon.getUuid());
         TaxonNode node = taxonNodeService.find(nodeUuid);
         if(node == null){
             logger.warn("The specified taxon is not found in the given tree.");
             return null;
+        }else if (subtree != null && !node.isDescendant(subtree)){
+            //TODO handle as exception? E.g. FilterException, AccessDeniedException?
+            logger.warn("The specified taxon is not found for the given subtree.");
+            return null;
         }
-        return loadTreeBranch(node, baseRank, includeUnpublished, propertyPaths);
+
+        return loadTreeBranch(node, subtree, baseRank, includeUnpublished, propertyPaths);
     }
 
 
@@ -313,12 +348,26 @@ public class ClassificationServiceImpl
     @Override
     public List<TaxonNode> listChildNodesOfTaxon(UUID taxonUuid, UUID classificationUuid,
             boolean includeUnpublished, Integer pageSize, Integer pageIndex, List<String> propertyPaths){
+        try {
+            return listChildNodesOfTaxon(taxonUuid, classificationUuid, null, includeUnpublished, pageSize, pageIndex, propertyPaths);
+        } catch (FilterException e) {
+            throw new RuntimeException(e);  //this should not happen as filter is null
+        }
+    }
+
+    @Override
+    public List<TaxonNode> listChildNodesOfTaxon(UUID taxonUuid, UUID classificationUuid, UUID subtreeUuid,
+            boolean includeUnpublished, Integer pageSize, Integer pageIndex, List<String> propertyPaths) throws FilterException{
 
         Classification classification = dao.load(classificationUuid);
         Taxon taxon = (Taxon) taxonDao.load(taxonUuid);
+        TaxonNode subtree = taxonNodeDao.load(subtreeUuid);
+        if (subtreeUuid != null && subtree == null){
+            throw new FilterException("Taxon node for subtree filter can not be found in database", true);
+        }
 
         List<TaxonNode> results = dao.listChildrenOf(
-                taxon, classification, includeUnpublished, pageSize, pageIndex, propertyPaths);
+                taxon, classification, subtree, includeUnpublished, pageSize, pageIndex, propertyPaths);
         Collections.sort(results, taxonNodeComparator); // FIXME this is only a HACK, order during the hibernate query in the dao
         return results;
     }
index 8007ed736f9eb8137601779f60f526ea48c58c23..23d1ad2cbe5344c06ffaa30c402b9417a02d41ac 100644 (file)
@@ -5,7 +5,7 @@
 *\r
 * The contents of this file are subject to the Mozilla Public License Version 1.1\r
 * See LICENSE.TXT at the top of this package for the full license terms.\r
-*/ \r
+*/\r
 \r
 package eu.etaxonomy.cdm.api.service;\r
 \r
@@ -23,8 +23,10 @@ import eu.etaxonomy.cdm.strategy.cache.common.IIdentifiableEntityCacheStrategy;
 \r
 @Service\r
 @Transactional(readOnly = true)\r
-public class CollectionServiceImpl extends     IdentifiableServiceBase<Collection, ICollectionDao> implements  ICollectionService {\r
-       \r
+public class CollectionServiceImpl\r
+            extends    IdentifiableServiceBase<Collection, ICollectionDao>\r
+            implements ICollectionService {\r
+\r
        @SuppressWarnings("unused")\r
        static private final Logger logger = Logger.getLogger(CollectionServiceImpl.class);\r
 \r
@@ -43,10 +45,10 @@ public class CollectionServiceImpl extends  IdentifiableServiceBase<Collection, I
                }\r
                super.updateTitleCacheImpl(clazz, stepSize, cacheStrategy, monitor);\r
        }\r
\r
 \r
-       \r
-       public List<Collection> searchByCode(String code) {\r
+\r
+       @Override\r
+    public List<Collection> searchByCode(String code) {\r
                return this.dao.getCollectionByCode(code);\r
        }\r
 }\r
index 5ea9615669f155bcbea68d67ca04a6de49f2b8cf..846bd92b936c7c044f2706317be072ab4ce8044b 100644 (file)
@@ -84,12 +84,9 @@ import eu.etaxonomy.cdm.strategy.cache.common.IIdentifiableEntityCacheStrategy;
 
 /**
  * @author a.mueller
- * @since 24.06.2008
- * @version 1.0
- */
-/**
  * @author a.kohlbecker
- * @since Dec 5, 2013
+ *
+ * @since 24.06.2008
  *
  */
 @Service
@@ -421,12 +418,13 @@ public class DescriptionServiceImpl
      * move: descriptionElementService.search
      */
     @Override
-    public Pager<DescriptionElementBase> searchElements(Class<? extends DescriptionElementBase> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+//    public Pager<T> searchElements(Class<? extends T> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+    public <S extends DescriptionElementBase> Pager<S> searchElements(Class<S> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
         long numberOfResults = descriptionElementDao.count(clazz, queryString);
 
-        List<DescriptionElementBase> results = new ArrayList<>();
+        List<S> results = new ArrayList<>();
         if(numberOfResults > 0) { // no point checking again //TODO use AbstractPagerImpl.hasResultsInRange(numberOfResults, pageNumber, pageSize)
-            results = descriptionElementDao.search(clazz, queryString, pageSize, pageNumber, orderHints, propertyPaths);
+            results = (List<S>)descriptionElementDao.search(clazz, queryString, pageSize, pageNumber, orderHints, propertyPaths);
         }
 
         return new DefaultPagerImpl<>(pageNumber, numberOfResults, pageSize, results);
index fbc379f182c6aa97b9d6630f9c82ea1de416c846..b0290fb96205812fb996da5b198f65f2223af5f4 100644 (file)
@@ -125,20 +125,29 @@ public class DescriptiveDataSetService
 
     @Override
     public Collection<SpecimenNodeWrapper> loadSpecimens(DescriptiveDataSet descriptiveDataSet){
-        //set filter parameters
+        List<UUID> filteredNodes = findFilteredTaxonNodes(descriptiveDataSet);
+        return occurrenceService.listUuidAndTitleCacheByAssociatedTaxon(filteredNodes, null, null);
+    }
+
+    @Override
+    public List<UUID> findFilteredTaxonNodes(DescriptiveDataSet descriptiveDataSet){
         TaxonNodeFilter filter = TaxonNodeFilter.NewRankInstance(descriptiveDataSet.getMinRank(), descriptiveDataSet.getMaxRank());
         descriptiveDataSet.getGeoFilter().forEach(area -> filter.orArea(area.getUuid()));
         descriptiveDataSet.getTaxonSubtreeFilter().forEach(node -> filter.orSubtree(node));
         filter.setIncludeUnpublished(true);
 
-        List<UUID> filteredNodes = taxonNodeService.uuidList(filter);
-        return occurrenceService.listUuidAndTitleCacheByAssociatedTaxon(filteredNodes, null, null);
+        return taxonNodeService.uuidList(filter);
+    }
+
+    @Override
+    public List<TaxonNode> loadFilteredTaxonNodes(DescriptiveDataSet descriptiveDataSet, List<String> propertyPaths){
+        return taxonNodeService.load(findFilteredTaxonNodes(descriptiveDataSet), propertyPaths);
     }
 
-    private TaxonNode findTaxonNodeForDescription(TaxonNode taxonNode, DescriptionBase description){
-        List<DerivedUnit> units = occurrenceService.listByAssociatedTaxon(DerivedUnit.class, null, taxonNode.getTaxon(), null, null, null, null, Arrays.asList("descriptions"));
-        for (DerivedUnit unit : units) {
-            if(unit.getDescriptions().contains(description)){
+    private TaxonNode findTaxonNodeForDescription(TaxonNode taxonNode, SpecimenOrObservationBase specimen){
+        Collection<SpecimenNodeWrapper> nodeWrapper = occurrenceService.listUuidAndTitleCacheByAssociatedTaxon(Arrays.asList(taxonNode.getUuid()), null, null);
+        for (SpecimenNodeWrapper specimenNodeWrapper : nodeWrapper) {
+            if(specimenNodeWrapper.getUuidAndTitleCache().getId().equals(specimen.getId())){
                 return taxonNode;
             }
         }
@@ -159,7 +168,7 @@ public class DescriptiveDataSetService
                 for (TaxonNode node : taxonSubtreeFilter) {
                     //check for node
                     node = taxonNodeService.load(node.getId(), Arrays.asList("taxon"));
-                    taxonNode = findTaxonNodeForDescription(node, description);
+                    taxonNode = findTaxonNodeForDescription(node, specimen);
                     if(taxonNode!=null){
                         break;
                     }
@@ -167,7 +176,7 @@ public class DescriptiveDataSetService
                         //check for child nodes
                         List<TaxonNode> allChildren = taxonNodeService.loadChildNodesOfTaxonNode(node, Arrays.asList("taxon"), true, true, null);
                         for (TaxonNode child : allChildren) {
-                            taxonNode = findTaxonNodeForDescription(child, description);
+                            taxonNode = findTaxonNodeForDescription(child, specimen);
                             if(taxonNode!=null){
                                 break;
                             }
index 58c0203f7df5eee66f6955c0f2be70e90ea85d96..205318a7f63d3b96c2d21e4fd1c5297dcef5fd3b 100644 (file)
@@ -19,6 +19,7 @@ import eu.etaxonomy.cdm.api.service.config.TaxonDeletionConfigurator;
 import eu.etaxonomy.cdm.api.service.dto.GroupedTaxonDTO;
 import eu.etaxonomy.cdm.api.service.dto.TaxonInContextDTO;
 import eu.etaxonomy.cdm.api.service.pager.Pager;
+import eu.etaxonomy.cdm.exception.FilterException;
 import eu.etaxonomy.cdm.exception.UnpublishedException;
 import eu.etaxonomy.cdm.model.common.MarkerType;
 import eu.etaxonomy.cdm.model.media.MediaRepresentation;
@@ -97,16 +98,29 @@ public interface IClassificationService extends IIdentifiableEntityService<Class
      * If the <code>rank</code> is null the absolute root nodes will be returned.
 
      * @param classification may be null for all classifications
+     * @param subtree filter on a taxonomic subtree
      * @param rank the set to null for to get the root nodes of classifications
      * @param includeUnpublished if <code>true</code> unpublished taxa are also exported
      * @param pageSize The maximum number of relationships returned (can be null for all relationships)
      * @param pageIndex The offset (in pageSize chunks) from the start of the result set (0 - based)
      * @param propertyPaths
      * @return
+     * @see #pageRankSpecificRootNodes(Classification, TaxonNode, Rank, boolean, Integer, Integer, List)
      *
      */
-    public List<TaxonNode> listRankSpecificRootNodes(Classification classification, Rank rank,
-            boolean includeUnpublished, Integer pageSize, Integer pageIndex, List<String> propertyPaths);
+    public List<TaxonNode> listRankSpecificRootNodes(Classification classification, TaxonNode subtree,
+            Rank rank, boolean includeUnpublished, Integer pageSize, Integer pageIndex,
+            List<String> propertyPaths);
+
+
+    /**
+     * @see #listRankSpecificRootNodes(Classification, TaxonNode, Rank, boolean, Integer, Integer, List)
+     * @deprecated keep this for compatibility to older versions, might be removed in versions >5.3
+     */
+    @Deprecated
+    public List<TaxonNode> listRankSpecificRootNodes(Classification classification,
+            Rank rank, boolean includeUnpublished, Integer pageSize, Integer pageIndex,
+            List<String> propertyPaths);
 
 
     /**
@@ -118,6 +132,7 @@ public interface IClassificationService extends IIdentifiableEntityService<Class
      * If the <code>rank</code> is null the absolute root nodes will be returned.
      *
      * @param classification may be null for all classifications
+     * @param subtree the taxonomic subtree filter
      * @param rank the set to null for to get the root nodes of classifications
      * @param includeUnpublished if <code>true</code> unpublished taxa are also exported
      * @param pageSize The maximum number of relationships returned (can be null for all relationships)
@@ -125,9 +140,19 @@ public interface IClassificationService extends IIdentifiableEntityService<Class
      * @param propertyPaths
      * @return
      *
+     * @see #listRankSpecificRootNodes(Classification, TaxonNode, Rank, boolean, Integer, Integer, List)
+     */
+    public Pager<TaxonNode> pageRankSpecificRootNodes(Classification classification, TaxonNode subtree,
+            Rank rank, boolean includeUnpublished, Integer pageSize, Integer pageIndex,
+            List<String> propertyPaths);
+    /**
+     * @see #pageRankSpecificRootNodes(Classification, TaxonNode, Rank, boolean, Integer, Integer, List)
+     * @deprecated keep this for compatibility to older versions, might be removed in versions >5.3
      */
-    public Pager<TaxonNode> pageRankSpecificRootNodes(Classification classification, Rank rank,
-            boolean includeUnpublished, Integer pageSize, Integer pageIndex, List<String> propertyPaths);
+    @Deprecated
+    public Pager<TaxonNode> pageRankSpecificRootNodes(Classification classification,
+            Rank rank, boolean includeUnpublished, Integer pageSize, Integer pageIndex,
+            List<String> propertyPaths);
 
     /**
      * @param taxonNode
@@ -147,6 +172,8 @@ public interface IClassificationService extends IIdentifiableEntityService<Class
      * @throws UnpublishedException
      *            if any of the taxa in the path is unpublished an {@link UnpublishedException} is thrown.
      */
+    public List<TaxonNode> loadTreeBranch(TaxonNode taxonNode, TaxonNode subtree, Rank baseRank, boolean includeUnpublished,
+            List<String> propertyPaths) throws UnpublishedException;
     public List<TaxonNode> loadTreeBranch(TaxonNode taxonNode, Rank baseRank, boolean includeUnpublished,
             List<String> propertyPaths) throws UnpublishedException;
 
@@ -174,12 +201,20 @@ public interface IClassificationService extends IIdentifiableEntityService<Class
      * @throws UnpublishedException
      *            if any of the taxa in the path is unpublished an {@link UnpublishedException} is thrown
      */
-    public List<TaxonNode> loadTreeBranchToTaxon(Taxon taxon, Classification classification, Rank baseRank,
+    public List<TaxonNode> loadTreeBranchToTaxon(Taxon taxon, Classification classification,
+            TaxonNode subtree, Rank baseRank,
+            boolean includeUnpublished, List<String> propertyPaths) throws UnpublishedException;
+    public List<TaxonNode> loadTreeBranchToTaxon(Taxon taxon, Classification classification,
+            Rank baseRank,
             boolean includeUnpublished, List<String> propertyPaths) throws UnpublishedException;
 
     public List<TaxonNode> listChildNodesOfTaxon(UUID taxonUuid, UUID classificationUuid, boolean includeUnpublished,
             Integer pageSize, Integer pageIndex, List<String> propertyPaths);
 
+    public List<TaxonNode> listChildNodesOfTaxon(UUID taxonUuid, UUID classificationUuid, UUID subtreeUuid, boolean includeUnpublished,
+            Integer pageSize, Integer pageIndex, List<String> propertyPaths) throws FilterException;
+
+
     /**
      * @param taxonNode
      * @param propertyPaths
index 5ecf01745ce5afaa56731b89adb024d71effc8a8..dfca5ba7cef0f7cf0c0962dfe2685d870dc9cc94 100644 (file)
@@ -384,7 +384,7 @@ public interface IDescriptionService extends IIdentifiableEntityService<Descript
      * @return a Pager DescriptionElementBase instances
      * @see <a href="http://lucene.apache.org/java/2_4_0/queryparsersyntax.html">Apache Lucene - Query Parser Syntax</a>
      */
-    public Pager<DescriptionElementBase> searchElements(Class<? extends DescriptionElementBase> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
+    public <S extends DescriptionElementBase> Pager<S> searchElements(Class<S> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
 
     /**
      * Returns a List of Media that are associated with a given description element
index bf827c96de33b084c906f8ee9438b3a87ec83b36..6a4e87d564ec78708920b3d29831bde0592695bf 100644 (file)
@@ -61,13 +61,22 @@ public interface IDescriptiveDataSetService extends IIdentifiableEntityService<D
     public UUID monitGetRowWrapper(DescriptiveDataSet descriptiveDataSet);
 
     /**
-     * Loads all avaliable specimens wrapped in a {@link SpecimenNodeWrapper} object for
+     * Loads all available specimens wrapped in a {@link SpecimenNodeWrapper} object for
      * a given {@link DescriptiveDataSet} according to the filters set in the working set
-     * @param descriptiveDataSet the working set for which the specimens should be fetched
+     * @param descriptiveDataSet the data set for which the specimens should be fetched
      * @return a collection of wrapper objects
      */
     public Collection<SpecimenNodeWrapper> loadSpecimens(DescriptiveDataSet descriptiveDataSet);
 
+    /**
+     * Lists all taxon nodes that match the filter set defined in the
+     * {@link DescriptiveDataSet} given.
+     * @param the data set which defined the taxon node filter
+     * @return a list of {@link UUID}s from the filtered nodes
+     *
+     */
+    public List<UUID> findFilteredTaxonNodes(DescriptiveDataSet descriptiveDataSet);
+
     /**
      * Creates a row wrapper object for the given description
      * @param description the description for which the wrapper should be created
@@ -85,4 +94,13 @@ public interface IDescriptiveDataSetService extends IIdentifiableEntityService<D
      * @return
      */
     public SpecimenDescription findDescriptionForDescriptiveDataSet(UUID descriptiveDataSetUuid, UUID specimenUuid);
+
+    /**
+     * Loads all taxon nodes that match the filter set defined in the
+     * {@link DescriptiveDataSet} given.
+     * @param the data set which defined the taxon node filter
+     * @return a list of {@link TaxonNode}s from the filtered nodes
+     *
+     */
+    public List<TaxonNode> loadFilteredTaxonNodes(DescriptiveDataSet descriptiveDataSet, List<String> propertyPaths);
 }
index d28dd0f6f8c6e19856f4148d31b13c97257fe7dd..9ae380c4c2397b1eefadeabc87d491ae579e8ce0 100644 (file)
@@ -27,11 +27,14 @@ import eu.etaxonomy.cdm.model.common.LSID;
 import eu.etaxonomy.cdm.model.common.MarkerType;
 import eu.etaxonomy.cdm.model.media.Rights;
 import eu.etaxonomy.cdm.persistence.dao.common.Restriction;
+import eu.etaxonomy.cdm.persistence.dao.initializer.IBeanInitializer;
 import eu.etaxonomy.cdm.persistence.dto.UuidAndTitleCache;
 import eu.etaxonomy.cdm.persistence.query.MatchMode;
 import eu.etaxonomy.cdm.persistence.query.OrderHint;
 import eu.etaxonomy.cdm.strategy.cache.common.IIdentifiableEntityCacheStrategy;
 import eu.etaxonomy.cdm.strategy.match.IMatchStrategy;
+import eu.etaxonomy.cdm.strategy.match.IMatchable;
+import eu.etaxonomy.cdm.strategy.merge.IMergable;
 import eu.etaxonomy.cdm.strategy.merge.IMergeStrategy;
 
 public interface IIdentifiableEntityService<T extends IdentifiableEntity>
@@ -124,7 +127,7 @@ public interface IIdentifiableEntityService<T extends IdentifiableEntity>
      *            authorTeam.persistentTitleCache
      * @return a paged list of instances of type T matching the queryString
      */
-    public Pager<T> findByTitle(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
+    public <S extends T> Pager<S> findByTitle(Class<S> clazz, String queryString, MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
 
 
     /**
@@ -143,7 +146,7 @@ public interface IIdentifiableEntityService<T extends IdentifiableEntity>
      *            authorTeam.persistentTitleCache
      * @return a paged list of instances of type T matching the queryString
      */
-    public Pager<T> findByTitleWithRestrictions(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
+    public <S extends T> Pager<S> findByTitleWithRestrictions(Class<S> clazz, String queryString,MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
 
 
     /**
@@ -152,7 +155,7 @@ public interface IIdentifiableEntityService<T extends IdentifiableEntity>
      *
      * @return a paged list of instances of type T matching the queryString
      */
-    public Pager<T> findByTitle(IIdentifiableEntityServiceConfigurator<T> configurator);
+    public <S extends T> Pager<S> findByTitle(IIdentifiableEntityServiceConfigurator<S> configurator);
 
     /**
      * Return an Integer of how many objects matching the given query string, optionally filtered by class, optionally with a particular MatchMode
@@ -203,7 +206,7 @@ public interface IIdentifiableEntityService<T extends IdentifiableEntity>
      *            authorTeam.persistentTitleCache
      * @return a list of instances of type T matching the queryString
      */
-    public List<T> listByTitle(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
+    public <S extends T> List<S> listByTitle(Class<S> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
 
     /**
      * Return a List of objects matching the given query string, optionally filtered by class, optionally with a particular MatchMode
@@ -221,7 +224,7 @@ public interface IIdentifiableEntityService<T extends IdentifiableEntity>
      *            authorTeam.persistentTitleCache
      * @return a list of instances of type T matching the queryString
      */
-    public List<T> listByTitleWithRestrictions(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
+    public <S extends T> List<S> listByTitleWithRestrictions(Class<S> clazz, String queryString,MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
 
     /**
      * Return a List of objects matching the given query string, optionally filtered by class, optionally with a particular MatchMode
@@ -239,7 +242,7 @@ public interface IIdentifiableEntityService<T extends IdentifiableEntity>
      *            authorTeam.persistentTitleCache
      * @return a list of instances of type T matching the queryString
      */
-    public List<T> listByReferenceTitle(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
+    public <S extends T> List<S> listByReferenceTitle(Class<S> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
 
     /**
      * Return a List of objects matching the given query string, optionally filtered by class, optionally with a particular MatchMode
@@ -257,7 +260,7 @@ public interface IIdentifiableEntityService<T extends IdentifiableEntity>
      *            authorTeam.persistentTitleCache
      * @return a list of instances of type T matching the queryString
      */
-    public List<T> listByReferenceTitleWithRestrictions(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
+    public <S extends T> List<S> listByReferenceTitleWithRestrictions(Class<S> clazz, String queryString,MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
 
     /**
      * Returns a Paged List of IdentifiableEntity instances where the default field matches the String queryString (as interpreted by the Lucene QueryParser)
@@ -275,6 +278,7 @@ public interface IIdentifiableEntityService<T extends IdentifiableEntity>
      * @see <a href="http://lucene.apache.org/java/2_4_0/queryparsersyntax.html">Apache Lucene - Query Parser Syntax</a>
      */
     public Pager<T> search(Class<? extends T> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
+//    public <S extends T> Pager<S> search(Class<S> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
 
 
     /**
@@ -304,7 +308,7 @@ public interface IIdentifiableEntityService<T extends IdentifiableEntity>
      * @param matchMode
      * @return
      */
-    public Pager<T> findTitleCache(Class<? extends T> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, MatchMode matchMode);
+    public <S extends T> Pager<S> findTitleCache(Class<S> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, MatchMode matchMode);
 
 
     /**
index b6f493819be3cfdbfe68e15cbcb58b99a7cfb203..5f8fd3ed1c4c541040e2f10eda2a0e0b6460142d 100644 (file)
@@ -445,9 +445,9 @@ public interface IService<T extends ICdmBase>{
      */\r
     public MergeResult<T> merge(T newInstance, boolean returnTransientEntity);\r
 \r
-    public Pager<T> page(Class<? extends T> clazz, String param, String queryString, MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageIndex, List<OrderHint> orderHints, List<String> propertyPaths);\r
+    public <S extends T> Pager<S> page(Class<S> clazz, String param, String queryString, MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageIndex, List<OrderHint> orderHints, List<String> propertyPaths);\r
 \r
-    public Pager<T> pageByRestrictions(Class<? extends T> clazz, String param, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageIndex, List<OrderHint> orderHints,\r
+    public <S extends T> Pager<S> pageByRestrictions(Class<S> clazz, String param, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageIndex, List<OrderHint> orderHints,\r
             List<String> propertyPaths);\r
 \r
 }
index d69d456064fc3ddaaf5ae500eebc9967e7fc40dd..5afb54344336492883c48a828b36f09769a4099e 100644 (file)
@@ -343,4 +343,10 @@ public interface ITaxonNodeService extends IAnnotatableService<TaxonNode>{
      */
     public TaxonNodeDto findCommonParentDto(Collection<TaxonNodeDto> nodes);
 
+    /**
+     * @param taxonNodeUuid
+     * @return
+     */
+    TaxonNodeDto dto(UUID taxonNodeUuid);
+
 }
index d4cc13b45274a6ba71561769b023a8f2af69bb77..d37445947459a1813951ea6d0ac2d72e5bc28925 100644 (file)
@@ -539,7 +539,7 @@ public interface ITaxonService
      */
     @Deprecated
     public Pager<SearchResult<TaxonBase>> findByEverythingFullText(String queryString,
-            Classification classification, boolean includeUnpublished, List<Language> languages, boolean highlightFragments,
+            Classification classification, TaxonNode subtree, boolean includeUnpublished, List<Language> languages, boolean highlightFragments,
             Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) throws IOException, LuceneParseException, LuceneMultiSearchException;
 
     /**
@@ -585,9 +585,11 @@ public interface ITaxonService
      * @throws LuceneParseException
      */
     public Pager<SearchResult<TaxonBase>> findByFullText(Class<? extends TaxonBase> clazz, String queryString,
-            Classification classification, boolean includeUnpublished, List<Language> languages,
-            boolean highlightFragments, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints,
-            List<String> propertyPaths) throws IOException, LuceneParseException;
+            Classification classification, TaxonNode subtree,
+            boolean includeUnpublished, List<Language> languages,
+            boolean highlightFragments, Integer pageSize, Integer pageNumber,
+            List<OrderHint> orderHints, List<String> propertyPaths)
+                    throws IOException, LuceneParseException;
 
 
     /**
@@ -616,8 +618,9 @@ public interface ITaxonService
      * @throws IOException
      * @throws LuceneParseException
      */
+    //TODO needed? currently only used in test
     public Pager<SearchResult<TaxonBase>> findByDistribution(List<NamedArea> areaFilter, List<PresenceAbsenceTerm> statusFilter,
-            Classification classification,
+            Classification classification, TaxonNode subtree,
             Integer pageSize, Integer pageNumber,
             List<OrderHint> orderHints, List<String> propertyPaths) throws IOException, LuceneParseException;
 
@@ -636,7 +639,9 @@ public interface ITaxonService
      *            Additional filter criterion: If a taxonomic classification
      *            three is specified here the result set will only contain taxa
      *            of the given classification
+     * @param subtree
      * @param namedAreas
+     * @param distributionStatus
      * @param languages
      *            Additional filter criterion: Search only in these languages.
      *            Not all text fields in the cdm model are multilingual, thus
@@ -668,7 +673,8 @@ public interface ITaxonService
      */
     public Pager<SearchResult<TaxonBase>> findTaxaAndNamesByFullText(
             EnumSet<TaxaAndNamesSearchMode> searchModes,
-            String queryString, Classification classification, Set<NamedArea> namedAreas, Set<PresenceAbsenceTerm> distributionStatus,
+            String queryString, Classification classification, TaxonNode subtree,
+            Set<NamedArea> namedAreas, Set<PresenceAbsenceTerm> distributionStatus,
             List<Language> languages, boolean highlightFragments, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints,
             List<String> propertyPaths) throws IOException, LuceneParseException, LuceneMultiSearchException;
 
@@ -684,14 +690,18 @@ public interface ITaxonService
      *            the query string to filter by
      * @param classification
      *            Additional filter criterion: If a taxonomic classification
-     *            three is specified here the result set will only contain taxa
+     *            tree is specified here the result set will only contain taxa
      *            of the given classification
+     * @param subtree
+     *            Additional filter criterion: If a taxonomic classification
+     *            subtree is specified here the result set will only contain taxa
+     *            of the given subtree
      * @param features
      *            TODO
      * @param languages
      *            Additional filter criterion: Search only in these languages.
-     *            Not all text fields in the cdm model are multilingual, thus
-     *            this setting will only apply to the multilingiual fields.
+     *            Not all text fields in the CDM model are multi-lingual, thus
+     *            this setting will only apply to the multi-lingual fields.
      *            Other fields are searched nevertheless if this parameter is
      *            set or not.
      * @param highlightFragments
@@ -715,7 +725,8 @@ public interface ITaxonService
      * @throws LuceneCorruptIndexException
      * @throws LuceneParseException
      */
-    public Pager<SearchResult<TaxonBase>> findByDescriptionElementFullText(Class<? extends DescriptionElementBase> clazz, String queryString, Classification classification, List<Feature> features, List<Language> languages, boolean highlightFragments, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) throws IOException, LuceneParseException;
+    public Pager<SearchResult<TaxonBase>> findByDescriptionElementFullText(Class<? extends DescriptionElementBase> clazz,
+            String queryString, Classification classification, TaxonNode subtree, List<Feature> features, List<Language> languages, boolean highlightFragments, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) throws IOException, LuceneParseException;
 
     /**
      * Lists all Media found in an any TaxonDescription associated with this
index f343d444b9138eaee108d4647fb1ae05a08f7e26..c0a72aeeabbfcdd201143e9baf08bb08560f6ccf 100644 (file)
@@ -135,10 +135,10 @@ public abstract class IdentifiableServiceBase<T extends IdentifiableEntity, DAO
 
     @Transactional(readOnly = true)
     @Override
-    public Pager<T> findByTitle(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+    public <S extends T> Pager<S> findByTitle(Class<S> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
          long numberOfResults = dao.countByTitle(clazz, queryString, matchmode, criteria);
 
-         List<T> results = new ArrayList<>();
+         List<S> results = new ArrayList<>();
          if(numberOfResults > 0) { // no point checking again //TODO use AbstractPagerImpl.hasResultsInRange(numberOfResults, pageNumber, pageSize)
                 results = dao.findByTitle(clazz, queryString, matchmode, criteria, pageSize, pageNumber, orderHints, propertyPaths);
          }
@@ -148,10 +148,10 @@ public abstract class IdentifiableServiceBase<T extends IdentifiableEntity, DAO
 
        @Transactional(readOnly = true)
        @Override
-       public Pager<T> findByTitleWithRestrictions(Class<? extends T> clazz, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+       public <S extends T> Pager<S> findByTitleWithRestrictions(Class<S> clazz, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
                 long numberOfResults = dao.countByTitleWithRestrictions(clazz, queryString, matchmode, restrictions);
 
-                List<T> results = new ArrayList<>();
+                List<S> results = new ArrayList<>();
                 if(numberOfResults > 0) { // no point checking again //TODO use AbstractPagerImpl.hasResultsInRange(numberOfResults, pageNumber, pageSize)
                                results = dao.findByTitleWithRestrictions(clazz, queryString, matchmode, restrictions, pageSize, pageNumber, orderHints, propertyPaths);
                 }
@@ -162,7 +162,7 @@ public abstract class IdentifiableServiceBase<T extends IdentifiableEntity, DAO
 
        @Transactional(readOnly = true)
        @Override
-       public Pager<T> findByTitle(IIdentifiableEntityServiceConfigurator<T> config){
+       public <S extends T> Pager<S> findByTitle(IIdentifiableEntityServiceConfigurator<S> config){
 
            boolean withRestrictions = config.getRestrictions() != null && !config.getRestrictions().isEmpty();
            boolean withCriteria = config.getCriteria() != null && !config.getCriteria().isEmpty();
@@ -170,18 +170,18 @@ public abstract class IdentifiableServiceBase<T extends IdentifiableEntity, DAO
            if(withCriteria && withRestrictions){
                throw new RuntimeException("Restrictions and Criteria can not be used at the same time");
            } else if(withRestrictions){
-               return findByTitleWithRestrictions(config.getClazz(), config.getTitleSearchStringSqlized(), config.getMatchMode(), config.getRestrictions(), config.getPageSize(), config.getPageNumber(), config.getOrderHints(), config.getPropertyPaths());
+               return findByTitleWithRestrictions((Class<S>)config.getClazz(), config.getTitleSearchStringSqlized(), config.getMatchMode(), config.getRestrictions(), config.getPageSize(), config.getPageNumber(), config.getOrderHints(), config.getPropertyPaths());
            } else {
-               return findByTitle(config.getClazz(), config.getTitleSearchStringSqlized(), config.getMatchMode(), config.getCriteria(), config.getPageSize(), config.getPageNumber(), config.getOrderHints(), config.getPropertyPaths());
+               return findByTitle((Class<S>) config.getClazz(), config.getTitleSearchStringSqlized(), config.getMatchMode(), config.getCriteria(), config.getPageSize(), config.getPageNumber(), config.getOrderHints(), config.getPropertyPaths());
            }
        }
 
    @Transactional(readOnly = true)
     @Override
-    public List<T> listByTitle(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+    public <S extends T> List<S> listByTitle(Class<S> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
          long numberOfResults = dao.countByTitle(clazz, queryString, matchmode, criteria);
 
-         List<T> results = new ArrayList<>();
+         List<S> results = new ArrayList<>();
          if(numberOfResults > 0) { // no point checking again //TODO use AbstractPagerImpl.hasResultsInRange(numberOfResults, pageNumber, pageSize)
                 results = dao.findByTitle(clazz, queryString, matchmode, criteria, pageSize, pageNumber, orderHints, propertyPaths);
          }
@@ -190,10 +190,10 @@ public abstract class IdentifiableServiceBase<T extends IdentifiableEntity, DAO
 
        @Transactional(readOnly = true)
        @Override
-       public List<T> listByTitleWithRestrictions(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+       public <S extends T> List<S> listByTitleWithRestrictions(Class<S> clazz, String queryString,MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
                 long numberOfResults = dao.countByTitleWithRestrictions(clazz, queryString, matchmode, restrictions);
 
-                List<T> results = new ArrayList<>();
+                List<S> results = new ArrayList<>();
                 if(numberOfResults > 0) { // no point checking again //TODO use AbstractPagerImpl.hasResultsInRange(numberOfResults, pageNumber, pageSize)
                                results = dao.findByTitleWithRestrictions(clazz, queryString, matchmode, restrictions, pageSize, pageNumber, orderHints, propertyPaths);
                 }
@@ -202,10 +202,10 @@ public abstract class IdentifiableServiceBase<T extends IdentifiableEntity, DAO
 
        @Transactional(readOnly = true)
        @Override
-       public Pager<T> findTitleCache(Class<? extends T> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, MatchMode matchMode){
+       public <S extends T> Pager<S> findTitleCache(Class<S> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, MatchMode matchMode){
                long numberOfResults = dao.countTitleCache(clazz, queryString, matchMode);
 
-                List<T> results = new ArrayList<>();
+                List<S> results = new ArrayList<>();
                 if(numberOfResults > 0) { // no point checking again //TODO use AbstractPagerImpl.hasResultsInRange(numberOfResults, pageNumber, pageSize)
                                results = dao.findTitleCache(clazz, queryString, pageSize, pageNumber, orderHints, matchMode);
                 }
@@ -217,10 +217,10 @@ public abstract class IdentifiableServiceBase<T extends IdentifiableEntity, DAO
 
     @Transactional(readOnly = true)
     @Override
-    public List<T> listByReferenceTitle(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+    public <S extends T> List<S> listByReferenceTitle(Class<S> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
          long numberOfResults = dao.countByReferenceTitle(clazz, queryString, matchmode, criteria);
 
-         List<T> results = new ArrayList<>();
+         List<S> results = new ArrayList<>();
          if(numberOfResults > 0) { // no point checking again //TODO use AbstractPagerImpl.hasResultsInRange(numberOfResults, pageNumber, pageSize)
              results = dao.findByReferenceTitle(clazz, queryString, matchmode, criteria, pageSize, pageNumber, orderHints, propertyPaths);
          }
@@ -229,10 +229,10 @@ public abstract class IdentifiableServiceBase<T extends IdentifiableEntity, DAO
 
        @Transactional(readOnly = true)
        @Override
-       public List<T> listByReferenceTitleWithRestrictions(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+       public <S extends T> List<S> listByReferenceTitleWithRestrictions(Class<S> clazz, String queryString,MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
                 long numberOfResults = dao.countByReferenceTitleWithRestrictions(clazz, queryString, matchmode, restrictions);
 
-                List<T> results = new ArrayList<>();
+                List<S> results = new ArrayList<>();
                 if(numberOfResults > 0) { // no point checking again //TODO use AbstractPagerImpl.hasResultsInRange(numberOfResults, pageNumber, pageSize)
                     results = dao.findByReferenceTitleWithRestrictions(clazz, queryString, matchmode, restrictions, pageSize, pageNumber, orderHints, propertyPaths);
                 }
@@ -248,6 +248,7 @@ public abstract class IdentifiableServiceBase<T extends IdentifiableEntity, DAO
        @Transactional(readOnly = true)
        @Override
        public Pager<T> search(Class<? extends T> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+//     public <S extends T> Pager<S> search(Class<S> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
         long numberOfResults = dao.count(clazz,queryString);
 
                List<T> results = new ArrayList<>();
@@ -504,7 +505,7 @@ public abstract class IdentifiableServiceBase<T extends IdentifiableEntity, DAO
 
                while (! dedupState.isCompleted){
                        //get x page sizes
-                       List<T> objectList = getPages(clazz, dedupState, orderHints);
+                       List<? extends T> objectList = getPages(clazz, dedupState, orderHints);
                        //after each page check if any changes took place
                        int nUnEqualPages = handleAllPages(objectList, dedupState, nextGroup, matchStrategy, mergeStrategy);
                        nUnEqualPages = nUnEqualPages + dedupState.pageSize * dedupState.startPage;
@@ -518,7 +519,7 @@ public abstract class IdentifiableServiceBase<T extends IdentifiableEntity, DAO
        }
 
 
-       private int handleAllPages(List<T> objectList, DeduplicateState dedupState, List<T> nextGroup, IMatchStrategy matchStrategy, IMergeStrategy mergeStrategy) {
+       private int handleAllPages(List<? extends T> objectList, DeduplicateState dedupState, List<T> nextGroup, IMatchStrategy matchStrategy, IMergeStrategy mergeStrategy) {
                int nUnEqual = 0;
                for (T object : objectList){
                        String currentTitleCache = object.getTitleCache();
@@ -538,10 +539,10 @@ public abstract class IdentifiableServiceBase<T extends IdentifiableEntity, DAO
                return nUnEqual;
        }
 
-       private List<T> getPages(Class<? extends T> clazz, DeduplicateState dedupState, List<OrderHint> orderHints) {
-               List<T> result = new ArrayList<>();
+       private <S extends T> List<S> getPages(Class<S> clazz, DeduplicateState dedupState, List<OrderHint> orderHints) {
+               List<S> result = new ArrayList<>();
                for (int pageNo = dedupState.startPage; pageNo < dedupState.startPage + dedupState.nPages; pageNo++){
-                       List<T> objectList = listByTitleWithRestrictions(clazz, null, null, null, dedupState.pageSize, pageNo, orderHints, null);
+                       List<S> objectList = listByTitleWithRestrictions(clazz, null, null, null, dedupState.pageSize, pageNo, orderHints, null);
                        result.addAll(objectList);
                }
                if (result.size()< dedupState.nPages * dedupState.pageSize ){
index 8f4692f54eab553dcd87d2ad68359207f77b7f98..8aeaa322a03aa2f54097272bf6ed24526b55480e 100644 (file)
@@ -539,6 +539,9 @@ public class OccurrenceServiceImpl extends IdentifiableServiceBase<SpecimenOrObs
         specimenIdentifier = CdmFormatterFactory.format(derivedUnit, new FormatKey[] {
                 collectionKey, FormatKey.SPACE,
                 FormatKey.MOST_SIGNIFICANT_IDENTIFIER, FormatKey.SPACE });
+        if(CdmUtils.isBlank(specimenIdentifier)){
+            specimenIdentifier = derivedUnit.getTitleCache();
+        }
         if(CdmUtils.isBlank(specimenIdentifier)){
             specimenIdentifier = derivedUnit.getUuid().toString();
         }
@@ -587,15 +590,12 @@ public class OccurrenceServiceImpl extends IdentifiableServiceBase<SpecimenOrObs
                 fieldUnitDTO.setHasType(true);
             }
             TypeDesignationStatusBase<?> typeStatus = specimenTypeDesignation.getTypeStatus();
-            if (typeStatus != null) {
-                List<String> typedTaxaNames = new ArrayList<>();
-                String label = typeStatus.getLabel();
-                Set<TaxonName> typifiedNames = specimenTypeDesignation.getTypifiedNames();
-                for (TaxonName taxonName : typifiedNames) {
-                    typedTaxaNames.add(taxonName.getNameCache());
-                }
-                preservedSpecimenDTO.addTypes(label, typedTaxaNames);
+            Set<TaxonName> typifiedNames = specimenTypeDesignation.getTypifiedNames();
+            List<String> typedTaxaNames = new ArrayList<>();
+            for (TaxonName taxonName : typifiedNames) {
+                typedTaxaNames.add(taxonName.getTitleCache());
             }
+            preservedSpecimenDTO.addTypes(typeStatus!=null?typeStatus.getLabel():"", typedTaxaNames);
         }
 
         // individuals associations
@@ -747,7 +747,7 @@ public class OccurrenceServiceImpl extends IdentifiableServiceBase<SpecimenOrObs
                     if (derivative instanceof DnaSample) {
                         dto = new DNASampleDTO(derivative);
                     } else {
-                        dto = PreservedSpecimenDTO.newInstance(derivative);
+                        dto = new PreservedSpecimenDTO(derivative);
                     }
                     alreadyCollectedSpecimen.put(dto.getUuid(), dto);
                     dto.addAllDerivates(getDerivedUnitDTOsFor(dto, derivative, alreadyCollectedSpecimen));
@@ -862,7 +862,7 @@ public class OccurrenceServiceImpl extends IdentifiableServiceBase<SpecimenOrObs
                         derivedUnitDTO = new DNASampleDTO(derivedUnit);
                     } else {
                         derivedUnit = HibernateProxyHelper.deproxy(o, DerivedUnit.class);
-                        derivedUnitDTO = PreservedSpecimenDTO.newInstance(derivedUnit);
+                        derivedUnitDTO = new PreservedSpecimenDTO(derivedUnit);
                     }
                     if (alreadyCollectedSpecimen.get(derivedUnitDTO.getUuid()) == null){
                         alreadyCollectedSpecimen.put(derivedUnitDTO.getUuid(), derivedUnitDTO);
@@ -1034,7 +1034,7 @@ public class OccurrenceServiceImpl extends IdentifiableServiceBase<SpecimenOrObs
                 if (specimen instanceof DnaSample){
                     originalDTO = new DNASampleDTO((DnaSample)specimen);
                 } else {
-                    originalDTO = PreservedSpecimenDTO.newInstance((DerivedUnit)specimen);
+                    originalDTO = new PreservedSpecimenDTO((DerivedUnit)specimen);
                 }
                 originalDTO.addDerivate(derivedUnitDTO);
                 fieldUnitDto = findFieldUnitDTO(originalDTO, fieldUnits, alreadyCollectedSpecimen);
@@ -1596,8 +1596,8 @@ public class OccurrenceServiceImpl extends IdentifiableServiceBase<SpecimenOrObs
     }
 
     @Override
-    public Pager<SpecimenOrObservationBase> findByTitle(
-            IIdentifiableEntityServiceConfigurator<SpecimenOrObservationBase> config) {
+    public <S extends SpecimenOrObservationBase> Pager<S> findByTitle(
+            IIdentifiableEntityServiceConfigurator<S> config) {
         if (config instanceof FindOccurrencesConfigurator) {
             FindOccurrencesConfigurator occurrenceConfig = (FindOccurrencesConfigurator) config;
             List<SpecimenOrObservationBase> occurrences = new ArrayList<>();
@@ -1612,13 +1612,14 @@ public class OccurrenceServiceImpl extends IdentifiableServiceBase<SpecimenOrObs
             if(occurrenceConfig.getAssociatedTaxonNameUuid()!=null){
                 taxonName = nameService.load(occurrenceConfig.getAssociatedTaxonNameUuid());
             }
-            occurrences.addAll(dao.findOccurrences(occurrenceConfig.getClazz(),
+            List<? extends SpecimenOrObservationBase> foundOccurrences = dao.findOccurrences(occurrenceConfig.getClazz(),
                     occurrenceConfig.getTitleSearchString(), occurrenceConfig.getSignificantIdentifier(),
                     occurrenceConfig.getSpecimenType(), taxon, taxonName, occurrenceConfig.getMatchMode(), null, null,
-                    occurrenceConfig.getOrderHints(), occurrenceConfig.getPropertyPaths()));
+                    occurrenceConfig.getOrderHints(), occurrenceConfig.getPropertyPaths());
+            occurrences.addAll(foundOccurrences);
             occurrences = filterOccurencesByAssignmentAndHierarchy(occurrenceConfig, occurrences, taxon, taxonName);
 
-            return new DefaultPagerImpl<>(config.getPageNumber(), occurrences.size(), config.getPageSize(), occurrences);
+            return new DefaultPagerImpl<>(config.getPageNumber(), occurrences.size(), config.getPageSize(), (List<S>)occurrences);
         }
         return super.findByTitle(config);
     }
@@ -1629,9 +1630,9 @@ public class OccurrenceServiceImpl extends IdentifiableServiceBase<SpecimenOrObs
         //filter out (un-)assigned specimens
         if(taxon==null && taxonName==null){
             AssignmentStatus assignmentStatus = occurrenceConfig.getAssignmentStatus();
-            List<SpecimenOrObservationBase<?>> specimenWithAssociations = new ArrayList<>();
+            List<SpecimenOrObservationBase> specimenWithAssociations = new ArrayList<>();
             if(!assignmentStatus.equals(AssignmentStatus.ALL_SPECIMENS)){
-                for (SpecimenOrObservationBase<?> specimenOrObservationBase : occurrences) {
+                for (SpecimenOrObservationBase specimenOrObservationBase : occurrences) {
                     boolean includeUnpublished = true;  //TODO not sure if this is correct, maybe we have to propagate publish flag to higher methods.
                     Collection<TaxonBase<?>> associatedTaxa = listAssociatedTaxa(specimenOrObservationBase,
                             includeUnpublished, null, null, null, null);
index 67b5168f715bcfab514d6c5cd4badab0de91963f..c3ffe126f4e7286f76597f9729c2b0f3cb6865b5 100644 (file)
@@ -313,32 +313,31 @@ public abstract class ServiceBase<T extends CdmBase, DAO extends ICdmEntityDao<T
 \r
     @Override\r
     @Transactional(readOnly = true)\r
-    public Pager<T> page(Class<? extends T> clazz, String param, String queryString, MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageIndex, List<OrderHint> orderHints, List<String> propertyPaths){\r
+    public <S extends T> Pager<S> page(Class<S> clazz, String param, String queryString, MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageIndex, List<OrderHint> orderHints, List<String> propertyPaths){\r
 \r
-        List<T> records;\r
+        List<S> records;\r
         long resultSize = dao.countByParam(clazz, param, queryString, matchmode, criteria);\r
         if(AbstractPagerImpl.hasResultsInRange(resultSize, pageIndex, pageSize)){\r
             records = dao.findByParam(clazz, param, queryString, matchmode, criteria, pageSize, pageIndex, orderHints, propertyPaths);\r
         } else {\r
             records = new ArrayList<>();\r
         }\r
-        Pager<T> pager = new DefaultPagerImpl<>(pageIndex, resultSize, pageSize, records);\r
-        return pager;\r
+        return new DefaultPagerImpl<>(pageIndex, resultSize, pageSize, records);\r
     }\r
 \r
 \r
     @Override\r
     @Transactional(readOnly = true)\r
-    public Pager<T> pageByRestrictions(Class<? extends T> clazz, String param, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageIndex, List<OrderHint> orderHints, List<String> propertyPaths){\r
+    public <S extends T> Pager<S> pageByRestrictions(Class<S> clazz, String param, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageIndex, List<OrderHint> orderHints, List<String> propertyPaths){\r
 \r
-        List<T> records;\r
+        List<S> records;\r
         long resultSize = dao.countByParamWithRestrictions(clazz, param, queryString, matchmode, restrictions);\r
         if(AbstractPagerImpl.hasResultsInRange(resultSize, pageIndex, pageSize)){\r
             records = dao.findByParamWithRestrictions(clazz, param, queryString, matchmode, restrictions, pageSize, pageIndex, orderHints, propertyPaths);\r
         } else {\r
             records = new ArrayList<>();\r
         }\r
-        Pager<T> pager = new DefaultPagerImpl<>(pageIndex, resultSize, pageSize, records);\r
+        Pager<S> pager = new DefaultPagerImpl<>(pageIndex, resultSize, pageSize, records);\r
         return pager;\r
     }\r
 \r
index 19d826713c0dcdb76d4f39e1c0b6595df64be98f..195fa19c8e7549003ec43f35c69bc7d371ae9fae 100644 (file)
@@ -247,6 +247,15 @@ public class TaxonNodeServiceImpl
         return null;
     }
 
+    @Override
+    public TaxonNodeDto dto(UUID taxonNodeUuid) {
+        TaxonNode taxonNode = dao.load(taxonNodeUuid);
+        if(taxonNode.getParent() != null) {
+            return new TaxonNodeDto(taxonNode);
+        }
+        return null;
+    }
+
     @Override
     @Autowired
     protected void setDao(ITaxonNodeDao dao) {
index 8c5fbcf11483463417617112f758d17c93a98f47..4982ed0cea9c035dca55aeaf0329d82c0822d6af 100644 (file)
@@ -22,7 +22,7 @@ import java.util.UUID;
 
 import javax.persistence.EntityNotFoundException;
 
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Logger;
 import org.apache.lucene.queryparser.classic.ParseException;
 import org.apache.lucene.search.BooleanClause.Occur;
@@ -579,7 +579,7 @@ public class TaxonServiceImpl
             }
             // filter by includeRelationships
             for (TaxonRelationshipEdge relationshipEdgeFilter : includeRelationships) {
-                if ( relationshipEdgeFilter.getTaxonRelationshipTypes().equals(taxRel.getType()) ) {
+                if ( relationshipEdgeFilter.getRelationshipTypes().equals(taxRel.getType()) ) {
                     if (relationshipEdgeFilter.getDirections().contains(Direction.relatedTo) && !taxa.contains(taxRel.getToTaxon())) {
                         if(logger.isDebugEnabled()){
                             logger.debug(maxDepth + ": " + taxon.getTitleCache() + " --[" + taxRel.getType().getLabel() + "]--> " + taxRel.getToTaxon().getTitleCache());
@@ -661,7 +661,8 @@ public class TaxonServiceImpl
         if (config.isDoSynonyms() || config.isDoTaxa() || config.isDoNamesWithoutTaxa() || config.isDoTaxaByCommonNames()){
                return dao.getTaxaByNameForEditor(config.isDoTaxa(), config.isDoSynonyms(), config.isDoNamesWithoutTaxa(),
                        config.isDoMisappliedNames(), config.isDoTaxaByCommonNames(), config.isIncludeUnpublished(),
-                       config.getTitleSearchStringSqlized(), config.getClassification(), config.getMatchMode(), config.getNamedAreas(), config.getOrder());
+                       config.getTitleSearchStringSqlized(), config.getClassification(), config.getSubtree(),
+                       config.getMatchMode(), config.getNamedAreas(), config.getOrder());
         }else{
             return new ArrayList<>();
         }
@@ -677,26 +678,24 @@ public class TaxonServiceImpl
         // Taxa and synonyms
         long numberTaxaResults = 0L;
 
-
         List<String> propertyPath = new ArrayList<>();
         if(configurator.getTaxonPropertyPath() != null){
             propertyPath.addAll(configurator.getTaxonPropertyPath());
         }
 
-
-       if (configurator.isDoMisappliedNames() || configurator.isDoSynonyms() || configurator.isDoTaxa() || configurator.isDoTaxaByCommonNames()){
+        if (configurator.isDoMisappliedNames() || configurator.isDoSynonyms() || configurator.isDoTaxa() || configurator.isDoTaxaByCommonNames()){
             if(configurator.getPageSize() != null){ // no point counting if we need all anyway
                 numberTaxaResults =
                     dao.countTaxaByName(configurator.isDoTaxa(),configurator.isDoSynonyms(), configurator.isDoMisappliedNames(),
                         configurator.isDoTaxaByCommonNames(), configurator.isDoIncludeAuthors(), configurator.getTitleSearchStringSqlized(),
-                        configurator.getClassification(), configurator.getMatchMode(),
+                        configurator.getClassification(), configurator.getSubtree(), configurator.getMatchMode(),
                         configurator.getNamedAreas(), configurator.isIncludeUnpublished());
             }
 
             if(configurator.getPageSize() == null || numberTaxaResults > configurator.getPageSize() * configurator.getPageNumber()){ // no point checking again if less results
                 taxa = dao.getTaxaByName(configurator.isDoTaxa(), configurator.isDoSynonyms(),
                     configurator.isDoMisappliedNames(), configurator.isDoTaxaByCommonNames(), configurator.isDoIncludeAuthors(),
-                    configurator.getTitleSearchStringSqlized(), configurator.getClassification(),
+                    configurator.getTitleSearchStringSqlized(), configurator.getClassification(), configurator.getSubtree(),
                     configurator.getMatchMode(), configurator.getNamedAreas(), configurator.isIncludeUnpublished(),
                     configurator.getOrder(), configurator.getPageSize(), configurator.getPageNumber(), propertyPath);
             }
@@ -1165,11 +1164,11 @@ public class TaxonServiceImpl
         try{
             // 1. search for accepted taxa
             List<TaxonBase> taxonList = dao.findByNameTitleCache(true, false, config.isIncludeUnpublished(),
-                    config.getTaxonNameTitle(), null, MatchMode.EXACT, null, null, 0, null, null);
+                    config.getTaxonNameTitle(), null, null, MatchMode.EXACT, null, null, 0, null, null);
             boolean bestCandidateMatchesSecUuid = false;
             boolean bestCandidateIsInClassification = false;
             int countEqualCandidates = 0;
-            for(TaxonBase taxonBaseCandidate : taxonList){
+            for(TaxonBase<?> taxonBaseCandidate : taxonList){
                 if(taxonBaseCandidate instanceof Taxon){
                     Taxon newCanditate = CdmBase.deproxy(taxonBaseCandidate, Taxon.class);
                     boolean newCandidateMatchesSecUuid = isMatchesSecUuid(newCanditate, config);
@@ -1217,7 +1216,7 @@ public class TaxonServiceImpl
             // 2. search for synonyms
             if (config.isIncludeSynonyms()){
                 List<TaxonBase> synonymList = dao.findByNameTitleCache(false, true, config.isIncludeUnpublished(),
-                        config.getTaxonNameTitle(), null, MatchMode.EXACT, null, null, 0, null, null);
+                        config.getTaxonNameTitle(), null, null, MatchMode.EXACT, null, null, 0, null, null);
                 for(TaxonBase taxonBase : synonymList){
                     if(taxonBase instanceof Synonym){
                         Synonym synonym = CdmBase.deproxy(taxonBase, Synonym.class);
@@ -1264,7 +1263,7 @@ public class TaxonServiceImpl
 
     @Override
     public Synonym findBestMatchingSynonym(String taxonName, boolean includeUnpublished) {
-        List<TaxonBase> synonymList = dao.findByNameTitleCache(false, true, includeUnpublished, taxonName, null, MatchMode.EXACT, null, null, 0, null, null);
+        List<TaxonBase> synonymList = dao.findByNameTitleCache(false, true, includeUnpublished, taxonName, null, null, MatchMode.EXACT, null, null, 0, null, null);
         if(! synonymList.isEmpty()){
             Synonym result = CdmBase.deproxy(synonymList.iterator().next(), Synonym.class);
             if(synonymList.size() == 1){
@@ -1373,12 +1372,12 @@ public class TaxonServiceImpl
     @Override
     public Pager<SearchResult<TaxonBase>> findByFullText(
             Class<? extends TaxonBase> clazz, String queryString,
-            Classification classification, boolean includeUnpublished, List<Language> languages,
+            Classification classification, TaxonNode subtree, boolean includeUnpublished, List<Language> languages,
             boolean highlightFragments, Integer pageSize, Integer pageNumber,
             List<OrderHint> orderHints, List<String> propertyPaths) throws IOException, LuceneParseException {
 
-        LuceneSearch luceneSearch = prepareFindByFullTextSearch(clazz, queryString, classification, null,
-                includeUnpublished, languages, highlightFragments, null);
+        LuceneSearch luceneSearch = prepareFindByFullTextSearch(clazz, queryString, classification, subtree,
+                null, includeUnpublished, languages, highlightFragments, null);
 
         // --- execute search
         TopGroups<BytesRef> topDocsResultSet;
@@ -1404,11 +1403,11 @@ public class TaxonServiceImpl
 
     @Override
     public Pager<SearchResult<TaxonBase>> findByDistribution(List<NamedArea> areaFilter, List<PresenceAbsenceTerm> statusFilter,
-            Classification classification,
+            Classification classification, TaxonNode subtree,
             Integer pageSize, Integer pageNumber,
             List<OrderHint> orderHints, List<String> propertyPaths) throws IOException, LuceneParseException {
 
-        LuceneSearch luceneSearch = prepareByDistributionSearch(areaFilter, statusFilter, classification);
+        LuceneSearch luceneSearch = prepareByDistributionSearch(areaFilter, statusFilter, classification, subtree);
 
         // --- execute search
         TopGroups<BytesRef> topDocsResultSet;
@@ -1444,7 +1443,7 @@ public class TaxonServiceImpl
      * @return
      */
     protected LuceneSearch prepareFindByFullTextSearch(Class<? extends CdmBase> clazz, String queryString,
-            Classification classification, String className, boolean includeUnpublished, List<Language> languages,
+            Classification classification, TaxonNode subtree, String className, boolean includeUnpublished, List<Language> languages,
             boolean highlightFragments, SortField[] sortFields) {
 
         Builder finalQueryBuilder = new Builder();
@@ -1461,7 +1460,7 @@ public class TaxonServiceImpl
         // ---- search criteria
         luceneSearch.setCdmTypRestriction(clazz);
 
-        if(!queryString.isEmpty() && !queryString.equals("*") && !queryString.equals("?") ) {
+        if(!StringUtils.isEmpty(queryString) && !queryString.equals("*") && !queryString.equals("?") ) {
             textQueryBuilder.add(taxonBaseQueryFactory.newTermQuery("titleCache", queryString), Occur.SHOULD);
             textQueryBuilder.add(taxonBaseQueryFactory.newDefinedTermQuery("name.rank", queryString, languages), Occur.SHOULD);
         }
@@ -1474,9 +1473,11 @@ public class TaxonServiceImpl
             finalQueryBuilder.add(textQuery, Occur.MUST);
         }
 
-
         if(classification != null){
-            finalQueryBuilder.add(taxonBaseQueryFactory.newEntityIdQuery("taxonNodes.classification.id", classification), Occur.MUST);
+            finalQueryBuilder.add(taxonBaseQueryFactory.newEntityIdQuery(AcceptedTaxonBridge.DOC_KEY_CLASSIFICATION_ID, classification), Occur.MUST);
+        }
+        if(subtree != null){
+            finalQueryBuilder.add(taxonBaseQueryFactory.newTermQuery(AcceptedTaxonBridge.DOC_KEY_TREEINDEX, subtree.treeIndexWc(), true), Occur.MUST);
         }
         if(!includeUnpublished)  {
             String accPublishParam = TaxonBase.ACC_TAXON_BRIDGE_PREFIX + AcceptedTaxonBridge.DOC_KEY_PUBLISH_SUFFIX;
@@ -1513,7 +1514,7 @@ public class TaxonServiceImpl
      * @throws IOException
      */
     protected LuceneSearch prepareFindByTaxonRelationFullTextSearch(TaxonRelationshipEdge edge, String queryString,
-            Classification classification, boolean includeUnpublished, List<Language> languages,
+            Classification classification, TaxonNode subtree, boolean includeUnpublished, List<Language> languages,
             boolean highlightFragments, SortField[] sortFields) throws IOException {
 
         String fromField;
@@ -1545,8 +1546,10 @@ public class TaxonServiceImpl
         QueryFactory taxonBaseQueryFactory = luceneIndexToolProvider.newQueryFactoryFor(TaxonBase.class);
 
         Builder joinFromQueryBuilder = new Builder();
-        joinFromQueryBuilder.add(taxonBaseQueryFactory.newTermQuery(queryTermField, queryString), Occur.MUST);
-        joinFromQueryBuilder.add(taxonBaseQueryFactory.newEntityIdsQuery("type.id", edge.getTaxonRelationshipTypes()), Occur.MUST);
+        if(!StringUtils.isEmpty(queryString)){
+            joinFromQueryBuilder.add(taxonBaseQueryFactory.newTermQuery(queryTermField, queryString), Occur.MUST);
+        }
+        joinFromQueryBuilder.add(taxonBaseQueryFactory.newEntityIdsQuery("type.id", edge.getRelationshipTypes()), Occur.MUST);
         if(!includeUnpublished){
             joinFromQueryBuilder.add(taxonBaseQueryFactory.newBooleanQuery(publishField, true), Occur.MUST);
             joinFromQueryBuilder.add(taxonBaseQueryFactory.newBooleanQuery(publishFieldInvers, true), Occur.MUST);
@@ -1562,7 +1565,10 @@ public class TaxonServiceImpl
         finalQueryBuilder.add(joinQuery, Occur.MUST);
 
         if(classification != null){
-            finalQueryBuilder.add(taxonBaseQueryFactory.newEntityIdQuery("taxonNodes.classification.id", classification), Occur.MUST);
+            finalQueryBuilder.add(taxonBaseQueryFactory.newEntityIdQuery(AcceptedTaxonBridge.DOC_KEY_CLASSIFICATION_ID, classification), Occur.MUST);
+        }
+        if(subtree != null){
+            finalQueryBuilder.add(taxonBaseQueryFactory.newTermQuery(AcceptedTaxonBridge.DOC_KEY_TREEINDEX, subtree.treeIndexWc(), true), Occur.MUST);
         }
 
         luceneSearch.setQuery(finalQueryBuilder.build());
@@ -1575,7 +1581,8 @@ public class TaxonServiceImpl
 
     @Override
     public Pager<SearchResult<TaxonBase>> findTaxaAndNamesByFullText(
-            EnumSet<TaxaAndNamesSearchMode> searchModes, String queryString, Classification classification,
+            EnumSet<TaxaAndNamesSearchMode> searchModes, String queryString,
+            Classification classification, TaxonNode subtree,
             Set<NamedArea> namedAreas, Set<PresenceAbsenceTerm> distributionStatus, List<Language> languages,
             boolean highlightFragments, Integer pageSize,
             Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths)
@@ -1592,7 +1599,7 @@ public class TaxonServiceImpl
 
         // convert sets to lists
         List<NamedArea> namedAreaList = null;
-        List<PresenceAbsenceTerm>distributionStatusList = null;
+        List<PresenceAbsenceTerm> distributionStatusList = null;
         if(namedAreas != null){
             namedAreaList = new ArrayList<>(namedAreas.size());
             namedAreaList.addAll(namedAreas);
@@ -1680,7 +1687,8 @@ public class TaxonServiceImpl
             } else if (!searchModes.contains(TaxaAndNamesSearchMode.doTaxa) && searchModes.contains(TaxaAndNamesSearchMode.doSynonyms)) {
                 className = "eu.etaxonomy.cdm.model.taxon.Synonym";
             }
-            luceneSearches.add(prepareFindByFullTextSearch(taxonBaseSubclass, queryString, classification, className,
+            luceneSearches.add(prepareFindByFullTextSearch(taxonBaseSubclass,
+                    queryString, classification, subtree, className,
                     includeUnpublished, languages, highlightFragments, sortFields));
             idFieldMap.put(CdmBaseType.TAXON, "id");
             /* A) does not work!!!!
@@ -1718,7 +1726,7 @@ public class TaxonServiceImpl
                     "inDescription.taxon.id",
                     true,
                     QueryFactory.addTypeRestriction(
-                                createByDescriptionElementFullTextQuery(queryString, classification, null, languages, descriptionElementQueryFactory)
+                                createByDescriptionElementFullTextQuery(queryString, classification, subtree, null, languages, descriptionElementQueryFactory)
                                 , CommonTaxonName.class
                                 ).build(), "id", null, ScoreMode.Max);
             if (logger.isDebugEnabled()){logger.debug("byCommonNameJoinQuery: " + byCommonNameJoinQuery.toString());}
@@ -1776,7 +1784,7 @@ public class TaxonServiceImpl
 
             luceneSearches.add(prepareFindByTaxonRelationFullTextSearch(
                     new TaxonRelationshipEdge(relTypes, Direction.relatedTo),
-                    queryString, classification, includeUnpublished, languages, highlightFragments, sortFields));
+                    queryString, classification, subtree, includeUnpublished, languages, highlightFragments, sortFields));
             idFieldMap.put(CdmBaseType.TAXON, "id");
 
             if(addDistributionFilter){
@@ -1832,7 +1840,7 @@ public class TaxonServiceImpl
 
             luceneSearches.add(prepareFindByTaxonRelationFullTextSearch(
                     new TaxonRelationshipEdge(relTypes, Direction.relatedTo),
-                    queryString, classification, includeUnpublished, languages, highlightFragments, sortFields));
+                    queryString, classification, subtree, includeUnpublished, languages, highlightFragments, sortFields));
             idFieldMap.put(CdmBaseType.TAXON, "id");
 
             if(addDistributionFilter){
@@ -1956,7 +1964,7 @@ public class TaxonServiceImpl
      */
     protected LuceneSearch prepareByDistributionSearch(
             List<NamedArea> namedAreaList, List<PresenceAbsenceTerm> distributionStatusList,
-            Classification classification) throws IOException {
+            Classification classification, TaxonNode subtree) throws IOException {
 
         Builder finalQueryBuilder = new Builder();
 
@@ -1974,7 +1982,10 @@ public class TaxonServiceImpl
         finalQueryBuilder.add(byAreaQuery, Occur.MUST);
 
         if(classification != null){
-            finalQueryBuilder.add(taxonQueryFactory.newEntityIdQuery("taxonNodes.classification.id", classification), Occur.MUST);
+            finalQueryBuilder.add(taxonQueryFactory.newEntityIdQuery(AcceptedTaxonBridge.DOC_KEY_CLASSIFICATION_ID, classification), Occur.MUST);
+        }
+        if(subtree != null){
+            finalQueryBuilder.add(taxonQueryFactory.newTermQuery(AcceptedTaxonBridge.DOC_KEY_TREEINDEX, subtree.treeIndexWc(), true), Occur.MUST);
         }
         BooleanQuery finalQuery = finalQueryBuilder.build();
         logger.info("prepareByAreaSearch() query: " + finalQuery.toString());
@@ -1986,11 +1997,11 @@ public class TaxonServiceImpl
     @Override
     public Pager<SearchResult<TaxonBase>> findByDescriptionElementFullText(
             Class<? extends DescriptionElementBase> clazz, String queryString,
-            Classification classification, List<Feature> features, List<Language> languages,
+            Classification classification, TaxonNode subtree, List<Feature> features, List<Language> languages,
             boolean highlightFragments, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) throws IOException, LuceneParseException {
 
 
-        LuceneSearch luceneSearch = prepareByDescriptionElementFullTextSearch(clazz, queryString, classification, features, languages, highlightFragments);
+        LuceneSearch luceneSearch = prepareByDescriptionElementFullTextSearch(clazz, queryString, classification, subtree, features, languages, highlightFragments);
 
         // --- execute search
         TopGroups<BytesRef> topDocsResultSet;
@@ -2019,12 +2030,13 @@ public class TaxonServiceImpl
 
     @Override
     public Pager<SearchResult<TaxonBase>> findByEverythingFullText(String queryString,
-            Classification classification, boolean includeUnpublished, List<Language> languages, boolean highlightFragments,
+            Classification classification, TaxonNode subtree, boolean includeUnpublished, List<Language> languages, boolean highlightFragments,
             Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) throws IOException, LuceneParseException, LuceneMultiSearchException {
 
-        LuceneSearch luceneSearchByDescriptionElement = prepareByDescriptionElementFullTextSearch(null, queryString, classification,
+        LuceneSearch luceneSearchByDescriptionElement = prepareByDescriptionElementFullTextSearch(null, queryString,
+                classification, subtree,
                 null, languages, highlightFragments);
-        LuceneSearch luceneSearchByTaxonBase = prepareFindByFullTextSearch(null, queryString, classification, null,
+        LuceneSearch luceneSearchByTaxonBase = prepareFindByFullTextSearch(null, queryString, classification, subtree, null,
                 includeUnpublished, languages, highlightFragments, null);
 
         LuceneMultiSearch multiSearch = new LuceneMultiSearch(luceneIndexToolProvider, luceneSearchByDescriptionElement, luceneSearchByTaxonBase);
@@ -2066,7 +2078,7 @@ public class TaxonServiceImpl
      * @return
      */
     protected LuceneSearch prepareByDescriptionElementFullTextSearch(Class<? extends CdmBase> clazz,
-            String queryString, Classification classification, List<Feature> features,
+            String queryString, Classification classification, TaxonNode subtree, List<Feature> features,
             List<Language> languages, boolean highlightFragments) {
 
         LuceneSearch luceneSearch = new LuceneSearch(luceneIndexToolProvider, GroupByTaxonClassBridge.GROUPBY_TAXON_FIELD, DescriptionElementBase.class);
@@ -2074,7 +2086,7 @@ public class TaxonServiceImpl
 
         SortField[] sortFields = new  SortField[]{SortField.FIELD_SCORE, new SortField("inDescription.taxon.titleCache__sort", SortField.Type.STRING, false)};
 
-        BooleanQuery finalQuery = createByDescriptionElementFullTextQuery(queryString, classification, features,
+        BooleanQuery finalQuery = createByDescriptionElementFullTextQuery(queryString, classification, subtree, features,
                 languages, descriptionElementQueryFactory);
 
         luceneSearch.setSortFields(sortFields);
@@ -2095,49 +2107,60 @@ public class TaxonServiceImpl
      * @param descriptionElementQueryFactory
      * @return
      */
-    private BooleanQuery createByDescriptionElementFullTextQuery(String queryString, Classification classification,
-            List<Feature> features, List<Language> languages, QueryFactory descriptionElementQueryFactory) {
+    private BooleanQuery createByDescriptionElementFullTextQuery(String queryString,
+            Classification classification, TaxonNode subtree, List<Feature> features,
+            List<Language> languages, QueryFactory descriptionElementQueryFactory) {
+
         Builder finalQueryBuilder = new Builder();
         Builder textQueryBuilder = new Builder();
-        textQueryBuilder.add(descriptionElementQueryFactory.newTermQuery("titleCache", queryString), Occur.SHOULD);
 
-        // common name
-        Builder nameQueryBuilder = new Builder();
-        if(languages == null || languages.size() == 0){
-            nameQueryBuilder.add(descriptionElementQueryFactory.newTermQuery("name", queryString), Occur.MUST);
-        } else {
-            Builder languageSubQueryBuilder = new Builder();
-            for(Language lang : languages){
-                languageSubQueryBuilder.add(descriptionElementQueryFactory.newTermQuery("language.uuid",  lang.getUuid().toString(), false), Occur.SHOULD);
+        if(!StringUtils.isEmpty(queryString)){
+
+            textQueryBuilder.add(descriptionElementQueryFactory.newTermQuery("titleCache", queryString), Occur.SHOULD);
+
+            // common name
+            Builder nameQueryBuilder = new Builder();
+            if(languages == null || languages.size() == 0){
+                nameQueryBuilder.add(descriptionElementQueryFactory.newTermQuery("name", queryString), Occur.MUST);
+            } else {
+                Builder languageSubQueryBuilder = new Builder();
+                for(Language lang : languages){
+                    languageSubQueryBuilder.add(descriptionElementQueryFactory.newTermQuery("language.uuid",  lang.getUuid().toString(), false), Occur.SHOULD);
+                }
+                nameQueryBuilder.add(descriptionElementQueryFactory.newTermQuery("name", queryString), Occur.MUST);
+                nameQueryBuilder.add(languageSubQueryBuilder.build(), Occur.MUST);
             }
-            nameQueryBuilder.add(descriptionElementQueryFactory.newTermQuery("name", queryString), Occur.MUST);
-            nameQueryBuilder.add(languageSubQueryBuilder.build(), Occur.MUST);
-        }
-        textQueryBuilder.add(nameQueryBuilder.build(), Occur.SHOULD);
+            textQueryBuilder.add(nameQueryBuilder.build(), Occur.SHOULD);
 
 
-        // text field from TextData
-        textQueryBuilder.add(descriptionElementQueryFactory.newMultilanguageTextQuery("text", queryString, languages), Occur.SHOULD);
+            // text field from TextData
+            textQueryBuilder.add(descriptionElementQueryFactory.newMultilanguageTextQuery("text", queryString, languages), Occur.SHOULD);
 
-        // --- TermBase fields - by representation ----
-        // state field from CategoricalData
-        textQueryBuilder.add(descriptionElementQueryFactory.newDefinedTermQuery("stateData.state", queryString, languages), Occur.SHOULD);
+            // --- TermBase fields - by representation ----
+            // state field from CategoricalData
+            textQueryBuilder.add(descriptionElementQueryFactory.newDefinedTermQuery("stateData.state", queryString, languages), Occur.SHOULD);
 
-        // state field from CategoricalData
-        textQueryBuilder.add(descriptionElementQueryFactory.newDefinedTermQuery("stateData.modifyingText", queryString, languages), Occur.SHOULD);
+            // state field from CategoricalData
+            textQueryBuilder.add(descriptionElementQueryFactory.newDefinedTermQuery("stateData.modifyingText", queryString, languages), Occur.SHOULD);
 
-        // area field from Distribution
-        textQueryBuilder.add(descriptionElementQueryFactory.newDefinedTermQuery("area", queryString, languages), Occur.SHOULD);
+            // area field from Distribution
+            textQueryBuilder.add(descriptionElementQueryFactory.newDefinedTermQuery("area", queryString, languages), Occur.SHOULD);
 
-        // status field from Distribution
-        textQueryBuilder.add(descriptionElementQueryFactory.newDefinedTermQuery("status", queryString, languages), Occur.SHOULD);
+            // status field from Distribution
+            textQueryBuilder.add(descriptionElementQueryFactory.newDefinedTermQuery("status", queryString, languages), Occur.SHOULD);
+
+            finalQueryBuilder.add(textQueryBuilder.build(), Occur.MUST);
 
-        finalQueryBuilder.add(textQueryBuilder.build(), Occur.MUST);
+        }
         // --- classification ----
 
+
         if(classification != null){
             finalQueryBuilder.add(descriptionElementQueryFactory.newEntityIdQuery("inDescription.taxon.taxonNodes.classification.id", classification), Occur.MUST);
         }
+        if(subtree != null){
+            finalQueryBuilder.add(descriptionElementQueryFactory.newTermQuery("inDescription.taxon.taxonNodes.treeIndex", subtree.treeIndexWc(), true), Occur.MUST);
+        }
 
         // --- IdentifieableEntity fields - by uuid
         if(features != null && features.size() > 0 ){
@@ -3067,7 +3090,7 @@ public class TaxonServiceImpl
     @Override
     public List<TaxonBase> findTaxaByName(MatchingTaxonConfigurator config){
         List<TaxonBase> taxonList = dao.getTaxaByName(true, config.isIncludeSynonyms(), false, false, false,
-                config.getTaxonNameTitle(), null, MatchMode.EXACT, null, config.isIncludeSynonyms(), null, 0, 0, config.getPropertyPath());
+                config.getTaxonNameTitle(), null, null, MatchMode.EXACT, null, config.isIncludeSynonyms(), null, 0, 0, config.getPropertyPath());
         return taxonList;
     }
 
index 9d7e1051b26420cb47b242a042b7ee02018e16d7..59ffa07c8dd23530f81bcdaf0e977ce5b016edd3 100644 (file)
@@ -16,6 +16,7 @@ import java.util.Set;
 import eu.etaxonomy.cdm.model.location.NamedArea;\r
 import eu.etaxonomy.cdm.model.taxon.Classification;\r
 import eu.etaxonomy.cdm.model.taxon.TaxonBase;\r
+import eu.etaxonomy.cdm.model.taxon.TaxonNode;\r
 import eu.etaxonomy.cdm.persistence.query.MatchMode;\r
 import eu.etaxonomy.cdm.persistence.query.NameSearchOrder;\r
 \r
@@ -43,13 +44,15 @@ public class FindTaxaAndNamesConfiguratorImpl extends IdentifiableServiceConfigu
        private boolean doMisappliedNames = false;\r
        private boolean doIncludeAuthors = false;\r
        private Classification classification = null;\r
-       private List<String> taxonPropertyPath;\r
+       private TaxonNode subtree = null;\r
+    private List<String> taxonPropertyPath;\r
        private List<String> synonymPropertyPath;\r
        private List<String> taxonNamePropertyPath;\r
        private List<String> commonNamePropertyPath;\r
        private Set<NamedArea> namedAreas;\r
        private NameSearchOrder order;\r
 \r
+\r
     /**\r
         * @return the taxonNamePropertyPath\r
         */\r
@@ -217,4 +220,13 @@ public class FindTaxaAndNamesConfiguratorImpl extends IdentifiableServiceConfigu
         this.includeUnpublished = includeUnpublished;\r
     }\r
 \r
+    @Override\r
+    public TaxonNode getSubtree() {\r
+        return subtree;\r
+    }\r
+    @Override\r
+    public void setSubtree(TaxonNode subtree) {\r
+        this.subtree = subtree;\r
+    }\r
+\r
 }\r
index 30e3e439e0f5dd55ffec25866bb76473c8f6bff9..3eade5b86b81d6850f41e67053f513df8e491d07 100644 (file)
@@ -15,6 +15,7 @@ import java.util.Set;
 import eu.etaxonomy.cdm.model.location.NamedArea;\r
 import eu.etaxonomy.cdm.model.taxon.Classification;\r
 import eu.etaxonomy.cdm.model.taxon.TaxonBase;\r
+import eu.etaxonomy.cdm.model.taxon.TaxonNode;\r
 import eu.etaxonomy.cdm.persistence.query.NameSearchOrder;\r
 \r
 /**\r
@@ -38,6 +39,9 @@ public interface IFindTaxaAndNamesConfigurator extends IIdentifiableEntityServic
     public Classification getClassification();\r
     public void setClassification(Classification classification);\r
 \r
+    public TaxonNode getSubtree();\r
+    public void setSubtree(TaxonNode subtree);\r
+\r
     public Set<NamedArea> getNamedAreas();\r
     public void setNamedAreas(Set<NamedArea> areas);\r
 \r
index d98205ddb366016e5cf93a680def7ff5b75e18d0..cf71e08b1551051c5df7738c2f8ee36d541f4b5e 100644 (file)
@@ -47,10 +47,10 @@ public interface IIdentifiableEntityServiceConfigurator<T extends IIdentifiableE
 \r
        public Integer getPageNumber();\r
        public void setPageNumber(Integer pageNumber);\r
-    \r
+\r
        public List<Criterion> getCriteria();\r
     public void setCriteria(List<Criterion> criteria);\r
-    \r
+\r
        public List<Restriction<?>> getRestrictions();\r
        public void setRestrictions(List<Restriction<?>> restrictions);\r
 \r
index 9828cd56078a45065dd63722acb4ba407882b7a5..830eb0bef4beb65b74d0d9dd79a1a9aec3393c9c 100644 (file)
@@ -23,7 +23,8 @@ import eu.etaxonomy.cdm.persistence.query.OrderHint;
  * @author n.hoffmann\r
  * @since 03.03.2009\r
  */\r
-public class IdentifiableServiceConfiguratorImpl<T extends IIdentifiableEntity> implements IIdentifiableEntityServiceConfigurator<T>{\r
+public class IdentifiableServiceConfiguratorImpl<T extends IIdentifiableEntity>\r
+            implements IIdentifiableEntityServiceConfigurator<T>{\r
 \r
     private static final long serialVersionUID = -8126736101861741087L;\r
 \r
index b63dd64a15243c90b494263ed537a93691b9d164..4972afb754313d03d6a3c95d88a7e4bab1df149a 100644 (file)
@@ -20,9 +20,6 @@ import eu.etaxonomy.cdm.model.common.CdmBase;
 public class CdmEntityIdentifier implements Serializable {
 
 
-    /**
-     *
-     */
     private static final long serialVersionUID = 1479948194282284147L;
 
 
index 0b243a7c65e6cf2a33b3de73577e95a3f435594a..51c7724a2217d98109e90444c27d9d15795dc4a5 100755 (executable)
@@ -8,6 +8,7 @@
 */
 package eu.etaxonomy.cdm.api.service.dto;
 
+import eu.etaxonomy.cdm.model.agent.Institution;
 import eu.etaxonomy.cdm.model.occurrence.Collection;
 
 /**
@@ -28,10 +29,12 @@ public class CollectionDTO {
      * @param institute
      * @param townOrLocation
      */
-    public CollectionDTO(String code, String codeStandard, String institute, String townOrLocation) {
+    public CollectionDTO(String code, String codeStandard, Institution institute, String townOrLocation) {
         this.code = code;
         this.codeStandard = codeStandard;
-        this.institute = institute;
+        if (institute != null){
+            this.institute = institute.getTitleCache();
+        }
         this.townOrLocation = townOrLocation;
     }
 
@@ -39,7 +42,7 @@ public class CollectionDTO {
      * @param collection
      */
     public CollectionDTO(Collection collection) {
-        this(collection.getCode(),collection.getCodeStandard(), collection.getInstitute().getTitleCache(),collection.getTownOrLocation());
+        this(collection.getCode(),collection.getCodeStandard(), collection.getInstitute(),collection.getTownOrLocation());
 
     }
 
index 96f2033312a7fe3a26e6c151c083f050f82d9ab8..7fc68ac6d33fd49e272ddab17dfa27c7e23ae43f 100755 (executable)
@@ -15,7 +15,6 @@ import eu.etaxonomy.cdm.model.common.Language;
 import eu.etaxonomy.cdm.model.location.NamedArea;
 import eu.etaxonomy.cdm.model.location.Point;
 import eu.etaxonomy.cdm.model.occurrence.GatheringEvent;
-import eu.etaxonomy.cdm.persistence.dto.TermDto;
 
 /**
  * @author k.luther
@@ -27,7 +26,7 @@ public class GatheringEventDTO {
     private String locality;
     private Point exactLocation;
     private String country;
-    private Set<TermDto> collectingAreas;
+    private Set<String> collectingAreas;
     private String collectingMethod;
     private Integer absoluteElevation;
     private Integer absoluteElevationMax;
@@ -55,7 +54,7 @@ public class GatheringEventDTO {
      * @param distanceToWaterSurfaceMax
      * @param distanceToWaterSurfaceText
      */
-    public GatheringEventDTO(String locality, Point exactLocation, String country, Set<TermDto> collectingAreas,
+    public GatheringEventDTO(String locality, Point exactLocation, String country, Set<String> collectingAreas,
             String collectingMethod, String collector, Integer absoluteElevation, Integer absoluteElevationMax,
             String absoluteElevationText, Double distanceToGround, Double distanceToGroundMax,
             String distanceToGroundText, Double distanceToWaterSurface, Double distanceToWaterSurfaceMax,
@@ -131,11 +130,11 @@ public class GatheringEventDTO {
         }
 
         for (NamedArea area: gathering.getCollectingAreas()){
-            TermDto areaDto = TermDto.fromNamedArea(area);
+            String areaString = area.getLabel();
             if (dto.getCollectingAreas() == null){
                 dto.collectingAreas = new HashSet<>();
             }
-            dto.collectingAreas.add(areaDto);
+            dto.collectingAreas.add(areaString);
         }
 
         return dto;
@@ -151,7 +150,7 @@ public class GatheringEventDTO {
     public String getCountry() {
         return country;
     }
-    public Set<TermDto> getCollectingAreas() {
+    public Set<String> getCollectingAreas() {
         return collectingAreas;
     }
     public String getCollectingMethod() {
index 200b7a8eaa82f23baaf45e689fec0d5b26ba8fc7..b872cbeb1ee6465d5a3e9cda3c00ae8e4bd2f54b 100644 (file)
@@ -74,35 +74,28 @@ public class PreservedSpecimenDTO extends DerivateDTO{
      */
     public PreservedSpecimenDTO(DerivedUnit derivedUnit) {
         super(derivedUnit);
-    }
-
-    public static PreservedSpecimenDTO newInstance(DerivedUnit derivedUnit){
-        PreservedSpecimenDTO newInstance = new PreservedSpecimenDTO(derivedUnit);
-
-//        newInstance.setTitleCache(derivedUnit.getTitleCache());
-
-        newInstance.accessionNumber = derivedUnit.getAccessionNumber();
-        newInstance.preferredStableUri = derivedUnit.getPreferredStableUri();
+        accessionNumber = derivedUnit.getAccessionNumber();
+        preferredStableUri = derivedUnit.getPreferredStableUri();
         if (derivedUnit.getCollection() != null){
-            newInstance.setCollectioDTo(new CollectionDTO(HibernateProxyHelper.deproxy(derivedUnit.getCollection())));
+            setCollectioDTo(new CollectionDTO(HibernateProxyHelper.deproxy(derivedUnit.getCollection())));
         }
-        newInstance.setBarcode(derivedUnit.getBarcode());
-        newInstance.setCatalogNumber(derivedUnit.getCatalogNumber());
-        newInstance.listLabel = derivedUnit.getCatalogNumber();
-        newInstance.setCollectorsNumber(derivedUnit.getCollectorsNumber());
+        setBarcode(derivedUnit.getBarcode());
+        setCatalogNumber(derivedUnit.getCatalogNumber());
+        listLabel = derivedUnit.getCatalogNumber();
+        setCollectorsNumber(derivedUnit.getCollectorsNumber());
         if (derivedUnit.getDerivedFrom() != null){
-            newInstance.setDerivationEvent(new DerivationEventDTO(derivedUnit.getDerivedFrom() ));
+            setDerivationEvent(new DerivationEventDTO(derivedUnit.getDerivedFrom() ));
         }
         if (derivedUnit.getPreservation()!= null){
-            newInstance.setPreservationMethod(derivedUnit.getPreservation().getMaterialMethodText());
+            setPreservationMethod(derivedUnit.getPreservation().getMaterialMethodText());
         }
-        newInstance.setRecordBase(derivedUnit.getRecordBasis().getMessage());
-        newInstance.setSources(derivedUnit.getSources());
-        newInstance.setSpecimenTypeDesignations(derivedUnit.getSpecimenTypeDesignations());
-
-        return newInstance;
+        setRecordBase(derivedUnit.getRecordBasis().getMessage());
+        setSources(derivedUnit.getSources());
+        setSpecimenTypeDesignations(derivedUnit.getSpecimenTypeDesignations());
     }
 
+
+
     public String getAccessionNumber() {
         return accessionNumber;
     }
index 61439ebe85065bbe73fb4b7f8d44c2f55ce6eadd..6abd659e58b0162d95cf9719b0368a262d52de3c 100755 (executable)
@@ -13,7 +13,6 @@ import java.net.URISyntaxException;
 import java.util.HashSet;
 import java.util.Set;
 
-import eu.etaxonomy.cdm.model.common.DefinedTerm;
 import eu.etaxonomy.cdm.model.media.Media;
 import eu.etaxonomy.cdm.model.molecular.Sequence;
 import eu.etaxonomy.cdm.model.molecular.SequenceString;
@@ -29,7 +28,7 @@ public class SequenceDTO {
 
     private Media contigFile;
 
-    private SequenceString consensusSequence = SequenceString.NewInstance();
+    private SequenceString consensusSequence;
 
 
     private Boolean isBarcode = null;
@@ -47,7 +46,7 @@ public class SequenceDTO {
     private Set<SingleReadAlignment> singleReadAlignments = new HashSet<SingleReadAlignment>();
 
 
-    private DefinedTerm dnaMarker;
+    private String dnaMarker;
 
 
 
@@ -69,7 +68,7 @@ public class SequenceDTO {
         geneticAccessionNumber = seq.getGeneticAccessionNumber();
         boldProcessId = seq.getBoldProcessId();
         singleReadAlignments = seq.getSingleReadAlignments();
-        dnaMarker = seq.getDnaMarker();
+        dnaMarker = seq.getDnaMarker().getLabel();
         haplotype = seq.getHaplotype();
         citations = seq.getCitations();
         try{
@@ -176,7 +175,7 @@ public class SequenceDTO {
     /**
      * @return the dnaMarker
      */
-    public DefinedTerm getDnaMarker() {
+    public String getDnaMarker() {
         return dnaMarker;
     }
 
diff --git a/cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/dto/SourceDTO.java b/cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/dto/SourceDTO.java
new file mode 100644 (file)
index 0000000..5da671b
--- /dev/null
@@ -0,0 +1,24 @@
+/**
+* Copyright (C) 2018 EDIT
+* European Distributed Institute of Taxonomy
+* http://www.e-taxonomy.eu
+*
+* The contents of this file are subject to the Mozilla Public License Version 1.1
+* See LICENSE.TXT at the top of this package for the full license terms.
+*/
+package eu.etaxonomy.cdm.api.service.dto;
+
+import java.util.UUID;
+
+/**
+ * @author a.kohlbecker
+ * @since Aug 31, 2018
+ *
+ */
+public class SourceDTO {
+
+    private UUID uuid;
+    String citation;
+    String citationDetail;
+
+}
index 2290875a1fef1f1917b37a1cc7ae5aa2eeb8c685..8b899d5fc9e7ec89dbc2912be6fa183179fbfe04 100644 (file)
@@ -38,9 +38,8 @@ public class TaxonRelationshipsDTO {
 
     private static final String SENSU_SEPARATOR = ", ";
 
-    public class TaxonRelation{
+    public class TaxonRelationDTO{
 
-        private UUID relationUuid;
         private boolean doubtful = false;
         private boolean misapplication = false;
         private boolean synonym = false;
@@ -51,14 +50,15 @@ public class TaxonRelationshipsDTO {
         //TODO maybe this will be changed in future
         private TermDto type;
         private UUID typeUuid;
+        private SourceDTO sec;
+        private SourceDTO relSec;
 
 
-        public TaxonRelation(TaxonRelationship relation, Direction direction, List<Language> languages) {
+        public TaxonRelationDTO(TaxonRelationship relation, Direction direction, List<Language> languages) {
             Taxon relatedTaxon = direction == Direction.relatedTo? relation.getToTaxon()
                     : relation.getFromTaxon();
             this.taxonUuid = relatedTaxon.getUuid();
             this.doubtful = relation.isDoubtful();
-            this.relationUuid = relation.getUuid();
             this.direction = direction;
             TaxonRelationshipType relType = relation.getType();
 
@@ -93,12 +93,6 @@ public class TaxonRelationshipsDTO {
         public void setDoubtful(boolean doubtful) {
             this.doubtful = doubtful;
         }
-        public UUID getRelationUuid() {
-            return relationUuid;
-        }
-        public void setRelationUuid(UUID relationUuid) {
-            this.relationUuid = relationUuid;
-        }
 
         public Direction getDirection() {
             return direction;
@@ -167,7 +161,7 @@ public class TaxonRelationshipsDTO {
 
     }
 
-    private List<TaxonRelation> relations = new ArrayList<>();
+    private List<TaxonRelationDTO> relations = new ArrayList<>();
 
     private List<List<TaggedText>> misapplications = new ArrayList<>();
 
@@ -184,15 +178,15 @@ public class TaxonRelationshipsDTO {
 
  // ************************** GETTER / SETTER  ***********************/
 
-    public List<TaxonRelation> getRelations() {
+    public List<TaxonRelationDTO> getRelations() {
         return relations;
     }
 
-    public void setIncludedTaxa(List<TaxonRelation> relations) {
+    public void setIncludedTaxa(List<TaxonRelationDTO> relations) {
         this.relations = relations;
     }
 
-    public void addRelation(TaxonRelation relation){
+    public void addRelation(TaxonRelationDTO relation){
         relations.add(relation);
     }
 
@@ -200,8 +194,8 @@ public class TaxonRelationshipsDTO {
      * @param relation
      * @param direction
      */
-    public TaxonRelation addRelation(TaxonRelationship relation, Direction direction, List<Language> languages) {
-        TaxonRelation newRelation = new TaxonRelation(relation, direction, languages);
+    public TaxonRelationDTO addRelation(TaxonRelationship relation, Direction direction, List<Language> languages) {
+        TaxonRelationDTO newRelation = new TaxonRelationDTO(relation, direction, languages);
         relations.add(newRelation);
         return newRelation;
     }
@@ -213,7 +207,7 @@ public class TaxonRelationshipsDTO {
     public void createMisapplicationString() {
         List<List<TaggedText>> result = new ArrayList<>();
 
-        for (TaxonRelation relation: relations){
+        for (TaxonRelationDTO relation: relations){
             if (relation.isMisapplication()){
                 List<TaggedText> tags = relation.getTaggedText();
 
@@ -331,7 +325,7 @@ public class TaxonRelationshipsDTO {
     }
 //
 //    public boolean contains(UUID taxonUuid) {
-//        for (TaxonRelation relation: relations){
+//        for (TaxonRelationDTO relation: relations){
 //            if (taxon.taxonUuid.equals(taxonUuid)){
 //                return true;
 //            }
@@ -342,7 +336,7 @@ public class TaxonRelationshipsDTO {
     @Override
     public String toString(){
         String result = "";
-        for (TaxonRelation relation : relations){
+        for (TaxonRelationDTO relation : relations){
             result += relation.toString() + ",";
         }
         if (result.length() > 0){
old mode 100644 (file)
new mode 100755 (executable)
index 2d09087..189be5c
@@ -13,17 +13,19 @@ import java.util.Comparator;
 import eu.etaxonomy.cdm.model.name.TypeDesignationBase;
 
 /**
- * @author pplitzner
- * @since May 3, 2018
+ * @author k.luther
+ * @since 06.09.2018
  *
  */
 public class TypeDesignationComparator implements Comparator<TypeDesignationBase> {
 
+
     private TypeDesignationStatusComparator statusComparator = new TypeDesignationStatusComparator();
 
     @SuppressWarnings("unchecked")
     @Override
     public int compare(TypeDesignationBase o1, TypeDesignationBase o2) {
+
         if(o1==null){
             return 1;
         }
@@ -33,9 +35,9 @@ public class TypeDesignationComparator implements Comparator<TypeDesignationBase
         if(o1.getTypeStatus()==null){
             return 1;
         }
-        if(o2.getTypeStatus()==null){
+        if(o2.getUuid()==null){
             return-1;
         }
         return statusComparator.compare(o1.getTypeStatus(), o2.getTypeStatus()) ;
     }
-}
+}
\ No newline at end of file
index f86211c8133a9a56bce3ae90ad395a01bb212ea7..9664569fa8a58fc7b5975ad8cde36757280864d7 100644 (file)
@@ -9,7 +9,6 @@
 package eu.etaxonomy.cdm.api.service.name;
 
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Comparator;
@@ -20,6 +19,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
+import java.util.UUID;
 
 import org.apache.commons.lang3.StringUtils;
 import org.hibernate.search.hcore.util.impl.HibernateHelper;
@@ -31,6 +31,7 @@ import eu.etaxonomy.cdm.model.common.CdmBase;
 import eu.etaxonomy.cdm.model.common.IdentifiableSource;
 import eu.etaxonomy.cdm.model.common.TermVocabulary;
 import eu.etaxonomy.cdm.model.common.VersionableEntity;
+import eu.etaxonomy.cdm.model.name.HomotypicalGroup;
 import eu.etaxonomy.cdm.model.name.NameTypeDesignation;
 import eu.etaxonomy.cdm.model.name.SpecimenTypeDesignation;
 import eu.etaxonomy.cdm.model.name.TaxonName;
@@ -40,11 +41,13 @@ import eu.etaxonomy.cdm.model.occurrence.DerivedUnit;
 import eu.etaxonomy.cdm.model.occurrence.FieldUnit;
 import eu.etaxonomy.cdm.model.occurrence.MediaSpecimen;
 import eu.etaxonomy.cdm.model.occurrence.SpecimenOrObservationBase;
+import eu.etaxonomy.cdm.model.reference.Reference;
 import eu.etaxonomy.cdm.ref.EntityReference;
 import eu.etaxonomy.cdm.ref.TypedEntityReference;
 import eu.etaxonomy.cdm.strategy.cache.TagEnum;
 import eu.etaxonomy.cdm.strategy.cache.TaggedText;
 import eu.etaxonomy.cdm.strategy.cache.TaggedTextBuilder;
+import eu.etaxonomy.cdm.strategy.cache.reference.DefaultReferenceCacheStrategy;
 /**
  * Manages a collection of {@link TypeDesignationBase TypeDesignations} for the same typified name.
  *
@@ -78,8 +81,13 @@ public class TypeDesignationSetManager {
     private static final String TYPE_SEPARATOR = "; ";
 
     private static final String TYPE_DESIGNATION_SEPARATOR = ", ";
-
-    private Collection<TypeDesignationBase> typeDesignations;
+    private static final String TYPE_STATUS_SEPARATOR_WITHCITATION = ": ";
+    private static final String TYPE_STATUS_PARENTHESIS_LEFT = " (";
+    private static final String TYPE_STATUS_PARENTHESIS_RIGHT = ")";
+    private static final String REFERENCE_PARENTHESIS_RIGHT = "]";
+    private static final String REFERENCE_PARENTHESIS_LEFT = " [";
+    private static final String REFERENCE_FIDE = "fide ";
+    private Map<UUID,TypeDesignationBase> typeDesignations;
 
     private NameTypeBaseEntityType nameTypeBaseEntityType = NameTypeBaseEntityType.NAME_TYPE_DESIGNATION;
 
@@ -100,6 +108,7 @@ public class TypeDesignationSetManager {
     private List<String> problems = new ArrayList<>();
 
     private boolean printCitation = false;
+    private boolean useShortCitation = false;
 
     private List<TaggedText> taggedText;
 
@@ -110,16 +119,38 @@ public class TypeDesignationSetManager {
      *
      */
     public TypeDesignationSetManager(Collection<TypeDesignationBase> typeDesignations) throws RegistrationValidationException {
-        this.typeDesignations = typeDesignations;
+        if (this.typeDesignations == null){
+            this.typeDesignations = new HashMap();
+        }
+        for (TypeDesignationBase typeDes:typeDesignations){
+            this.typeDesignations.put(typeDes.getUuid(), typeDes);
+        }
         findTypifiedName();
         mapAndSort();
     }
 
+    /**
+     * @param containgEntity
+     * @param taxonName
+     * @throws RegistrationValidationException
+     *
+     */
+    public TypeDesignationSetManager(HomotypicalGroup group) throws RegistrationValidationException {
+        if (this.typeDesignations == null){
+            this.typeDesignations = new HashMap();
+        }
+        for (TypeDesignationBase typeDes:group.getTypeDesignations()){
+            this.typeDesignations.put(typeDes.getUuid(), typeDes);
+        }
+        //findTypifiedName();
+        mapAndSort();
+    }
+
     /**
      * @param typifiedName2
      */
     public TypeDesignationSetManager(TaxonName typifiedName) {
-        this.typeDesignations = new ArrayList<>();
+        this.typeDesignations = new HashMap<>();
         this.typifiedNameRef = new EntityReference(typifiedName.getUuid(), typifiedName.getTitleCache());
     }
 
@@ -131,7 +162,9 @@ public class TypeDesignationSetManager {
      * @param typeDesignations
      */
     public void addTypeDesigations(CdmBase containgEntity, TypeDesignationBase ... typeDesignations){
-       this.typeDesignations.addAll(Arrays.asList(typeDesignations));
+        for (TypeDesignationBase typeDes: typeDesignations){
+            this.typeDesignations.put(typeDes.getUuid(), typeDes);
+        }
        mapAndSort();
     }
 
@@ -143,7 +176,8 @@ public class TypeDesignationSetManager {
     protected void mapAndSort() {
         finalString = null;
         Map<TypedEntityReference, TypeDesignationWorkingSet> byBaseEntityByTypeStatus = new HashMap<>();
-        this.typeDesignations.forEach(td -> mapTypeDesignation(byBaseEntityByTypeStatus, td));
+
+        this.typeDesignations.values().forEach(td -> mapTypeDesignation(byBaseEntityByTypeStatus, td));
         orderedByTypesByBaseEntity = orderByTypeByBaseEntity(byBaseEntityByTypeStatus);
     }
 
@@ -166,13 +200,11 @@ public class TypeDesignationSetManager {
                     td.getUuid(),
                     stringify(td));
 
-            TypeDesignationWorkingSet typedesignationWorkingSet;
             if(!byBaseEntityByTypeStatus.containsKey(baseEntityReference)){
                 byBaseEntityByTypeStatus.put(baseEntityReference, new TypeDesignationWorkingSet(baseEntity, baseEntityReference));
             }
+            byBaseEntityByTypeStatus.get(baseEntityReference).insert(status, typeDesignationEntityReference);
 
-            typedesignationWorkingSet = byBaseEntityByTypeStatus.get(baseEntityReference);
-            typedesignationWorkingSet.insert(status, typeDesignationEntityReference);
         } catch (DataIntegrityException e){
             problems.add(e.getMessage());
         }
@@ -313,9 +345,9 @@ public class TypeDesignationSetManager {
                         workingsetBuilder.add(TagEnum.separator, TYPE_SEPARATOR);
                     }
                     boolean isNameTypeDesignation = false;
-                    if(SpecimenOrObservationBase.class.isAssignableFrom(baseEntityRef.getType())){
+                    if(SpecimenOrObservationBase.class.isAssignableFrom(baseEntityRef.getType()) ){
                         workingsetBuilder.add(TagEnum.label, "Type:");
-                    } else {
+                    } else{
                         workingsetBuilder.add(TagEnum.label, "NameType:");
                         isNameTypeDesignation = true;
                     }
@@ -327,18 +359,95 @@ public class TypeDesignationSetManager {
                     for(TypeDesignationStatusBase<?> typeStatus : typeDesignationWorkingSet.keySet()) {
                         if(typeStatusCount++  > 0){
                             workingsetBuilder.add(TagEnum.separator, TYPE_STATUS_SEPARATOR);
+
                         }
                         boolean isPlural = typeDesignationWorkingSet.get(typeStatus).size() > 1;
                         if(!typeStatus.equals(NULL_STATUS)) {
+
                             workingsetBuilder.add(TagEnum.label, typeStatus.getLabel() + (isPlural ? "s:" : ","));
+                         }
+
+
+                        int typeDesignationCount = 0;
+                        for(TypedEntityReference typeDesignationEntityReference : createSortedList(typeDesignationWorkingSet, typeStatus)) {
+
+                            if(typeDesignationCount++  > 0){
+                               workingsetBuilder.add(TagEnum.separator, TYPE_DESIGNATION_SEPARATOR);
+                            }
+
+                            workingsetBuilder.add(TagEnum.typeDesignation, typeDesignationEntityReference.getLabel(), typeDesignationEntityReference);
+
+                        }
+
+                    }
+                    typeDesignationWorkingSet.setRepresentation(workingsetBuilder.toString());
+                    finalString += typeDesignationWorkingSet.getRepresentation();
+                    finalBuilder.addAll(workingsetBuilder);
+                }
+            }
+            finalString = finalString.trim();
+            taggedText = finalBuilder.getTaggedText();
+        }
+    }
+
+    public void buildStringWithCitation(){
+
+        if(finalString == null){
+
+            TaggedTextBuilder finalBuilder = new TaggedTextBuilder();
+            finalString = "";
+
+            if(getTypifiedNameCache() != null){
+                finalString += getTypifiedNameCache() + " ";
+                finalBuilder.add(TagEnum.name, getTypifiedNameCache(), new TypedEntityReference<>(TaxonName.class, getTypifiedNameRef().getUuid()));
+            }
+
+            int typeCount = 0;
+            if(orderedByTypesByBaseEntity != null){
+                for(TypedEntityReference baseEntityRef : orderedByTypesByBaseEntity.keySet()) {
+
+                    TaggedTextBuilder workingsetBuilder = new TaggedTextBuilder();
+                    if(typeCount++ > 0){
+                        workingsetBuilder.add(TagEnum.separator, TYPE_SEPARATOR);
+                    }
+                    boolean isNameTypeDesignation = false;
+
+                    if(!baseEntityRef.getLabel().isEmpty()){
+                        workingsetBuilder.add(TagEnum.specimenOrObservation, baseEntityRef.getLabel(), baseEntityRef);
+                    }
+                    TypeDesignationWorkingSet typeDesignationWorkingSet = orderedByTypesByBaseEntity.get(baseEntityRef);
+                    int typeStatusCount = 0;
+                    for(TypeDesignationStatusBase<?> typeStatus : typeDesignationWorkingSet.keySet()) {
+                        if(typeStatusCount++  > 0){
+                            workingsetBuilder.add(TagEnum.separator, TYPE_STATUS_SEPARATOR);
+
                         }
+                        boolean isPlural = typeDesignationWorkingSet.get(typeStatus).size() > 1;
+                        if(!typeStatus.equals(NULL_STATUS)) {
+                            workingsetBuilder.add(TagEnum.separator, TYPE_STATUS_PARENTHESIS_LEFT);
+                            workingsetBuilder.add(TagEnum.label, typeStatus.getLabel() + (isPlural ? "s:" : ":"));
+                         }
                         int typeDesignationCount = 0;
-                        for(TypedEntityReference typeDesignationEntityReference : typeDesignationWorkingSet.get(typeStatus)) {
+                        for(TypedEntityReference typeDesignationEntityReference : createSortedList(typeDesignationWorkingSet, typeStatus)) {
                             if(typeDesignationCount++  > 0){
-                                workingsetBuilder.add(TagEnum.separator, TYPE_DESIGNATION_SEPARATOR);
+                               workingsetBuilder.add(TagEnum.separator, TYPE_DESIGNATION_SEPARATOR);
                             }
+
                             workingsetBuilder.add(TagEnum.typeDesignation, typeDesignationEntityReference.getLabel(), typeDesignationEntityReference);
+
+                            TypeDesignationBase typeDes =  typeDesignations.get(typeDesignationEntityReference.getUuid());
+                            if (typeDes.getCitation() != null){
+                               // workingsetBuilder.add(TagEnum.separator, REFERENCE_PARENTHESIS_LEFT);
+                                String shortCitation = ((DefaultReferenceCacheStrategy)typeDes.getCitation().getCacheStrategy()).createShortCitation(typeDes.getCitation());
+                                workingsetBuilder.add(TagEnum.reference, shortCitation, typeDesignationEntityReference);
+                                //workingsetBuilder.add(TagEnum.separator, REFERENCE_PARENTHESIS_RIGHT);
+                            }
+
+                            if ((!typeStatus.equals(NULL_STATUS)) &&(typeDesignationCount ==  typeDesignationWorkingSet.get(typeStatus).size())){
+                                workingsetBuilder.add(TagEnum.separator, TYPE_STATUS_PARENTHESIS_RIGHT);
+                            }
                         }
+
                     }
                     typeDesignationWorkingSet.setRepresentation(workingsetBuilder.toString());
                     finalString += typeDesignationWorkingSet.getRepresentation();
@@ -350,6 +459,19 @@ public class TypeDesignationSetManager {
         }
     }
 
+    /**
+     * @param typeDesignationWorkingSet
+     * @param typeStatus
+     * @return
+     */
+    private List<TypedEntityReference<TypeDesignationBase>> createSortedList(
+            TypeDesignationWorkingSet typeDesignationWorkingSet, TypeDesignationStatusBase<?> typeStatus) {
+        List<TypedEntityReference<TypeDesignationBase>> typeDesignationEntityrReferences = new ArrayList(typeDesignationWorkingSet.get(typeStatus));
+        Collections.sort(typeDesignationEntityrReferences, new TypedEntityComparator());
+        return typeDesignationEntityrReferences;
+    }
+
+
     /**
      * FIXME use the validation framework validators to store the validation problems!!!
      *
@@ -362,7 +484,7 @@ public class TypeDesignationSetManager {
 
         TaxonName typifiedName = null;
 
-        for(TypeDesignationBase<?> typeDesignation : typeDesignations){
+        for(TypeDesignationBase<?> typeDesignation : typeDesignations.values()){
             typeDesignation.getTypifiedNames();
             if(typeDesignation.getTypifiedNames().isEmpty()){
 
@@ -424,7 +546,7 @@ public class TypeDesignationSetManager {
      * @return
      */
     public Collection<TypeDesignationBase> getTypeDesignations() {
-        return typeDesignations;
+        return typeDesignations.values();
     }
 
     /**
@@ -432,13 +554,7 @@ public class TypeDesignationSetManager {
      * @return
      */
     public TypeDesignationBase findTypeDesignation(EntityReference typeDesignationRef) {
-        for(TypeDesignationBase td : typeDesignations){
-            if(td.getUuid().equals(typeDesignationRef.getUuid())){
-                return td;
-            }
-        }
-        // TODO Auto-generated method stub
-        return null;
+        return this.typeDesignations.get(typeDesignationRef.getUuid());
     }
 
 
@@ -519,7 +635,7 @@ public class TypeDesignationSetManager {
                         if(msp.getMediaSpecimen() != null){
                             for(IdentifiableSource source : msp.getMediaSpecimen().getSources()){
                                 String refDetailStr = source.getCitationMicroReference();
-                                String referenceStr = source.getCitation().getTitleCache();
+                                String referenceStr = source.getCitation() == null? "": source.getCitation().getTitleCache();
                                 if(StringUtils.isNotBlank(source.getCitationMicroReference())){
                                     typeSpecimenTitle += refDetailStr;
                                 }
@@ -532,6 +648,7 @@ public class TypeDesignationSetManager {
                     } else {
                         DerivedUnitFacadeCacheStrategy cacheStrategy = new DerivedUnitFacadeCacheStrategy();
                         typeSpecimenTitle += cacheStrategy.getTitleCache(du, true);
+
                     }
 
                     result += (isMediaSpecimen ? "[icon] " : "") + typeSpecimenTitle.trim();
@@ -540,10 +657,12 @@ public class TypeDesignationSetManager {
         }
 
         if(isPrintCitation() && td.getCitation() != null){
-            if(td.getCitation().getAbbrevTitle() != null){
-                result += " " + td.getCitation().getAbbrevTitle();
+            Reference citation = HibernateProxyHelper.deproxy(td.getCitation(), Reference.class);
+            if(citation.getAbbrevTitle() != null){
+
+                result += " " + citation.getAbbrevTitle();
             } else {
-                result += " " + td.getCitation().getTitleCache();
+                result += " " + citation.getTitleCache();
             }
             if(td.getCitationMicroReference() != null){
                 result += " :" + td.getCitationMicroReference();
@@ -605,6 +724,12 @@ public class TypeDesignationSetManager {
         return taggedText;
     }
 
+    public List<TaggedText> toTaggedTextWithCitation() {
+        buildStringWithCitation();
+        return taggedText;
+    }
+
+
     /**
      * @return the printCitation
      */
@@ -634,6 +759,14 @@ public class TypeDesignationSetManager {
         return nameTypeBaseEntityType;
     }
 
+    public boolean isUseShortCitation() {
+        return useShortCitation;
+    }
+
+    public void setUseShortCitation(boolean useShortCitation) {
+        this.useShortCitation = useShortCitation;
+    }
+
     /**
      * TypeDesignations which refer to the same FieldUnit (SpecimenTypeDesignation) or TaxonName
      * (NameTypeDesignation) form a working set. The <code>TypeDesignationWorkingSet</code> internally
@@ -677,6 +810,8 @@ public class TypeDesignationSetManager {
             return typeDesignations;
         }
 
+
+
         /**
          * @param status
          * @param typeDesignationEntityReference
diff --git a/cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/name/TypedEntityComparator.java b/cdmlib-services/src/main/java/eu/etaxonomy/cdm/api/service/name/TypedEntityComparator.java
new file mode 100644 (file)
index 0000000..61d3ef1
--- /dev/null
@@ -0,0 +1,43 @@
+/**
+* Copyright (C) 2018 EDIT
+* European Distributed Institute of Taxonomy
+* http://www.e-taxonomy.eu
+*
+* The contents of this file are subject to the Mozilla Public License Version 1.1
+* See LICENSE.TXT at the top of this package for the full license terms.
+*/
+package eu.etaxonomy.cdm.api.service.name;
+
+import java.util.Comparator;
+
+import eu.etaxonomy.cdm.model.name.TypeDesignationBase;
+import eu.etaxonomy.cdm.ref.TypedEntityReference;
+
+/**
+ * @author pplitzner
+ * @since May 3, 2018
+ *
+ */
+public class TypedEntityComparator implements Comparator<TypedEntityReference<TypeDesignationBase> >{
+
+
+
+    @Override
+    @SuppressWarnings("unchecked")
+    public int compare(TypedEntityReference<TypeDesignationBase> o1, TypedEntityReference<TypeDesignationBase> o2) {
+        if(o1==null){
+            return 1;
+        }
+        if(o2==null){
+            return -1;
+        }
+        if(o1.getUuid()==null){
+            return 1;
+        }
+        if(o2.getUuid()==null){
+            return-1;
+        }
+
+        return o1.getLabel().compareTo(o2.getLabel());
+    }
+}
index b9916a4b7615c887b717b6d972fa1ed0e5477ae9..ace054c52b65052123d080cdf24a1bba256b0858 100644 (file)
@@ -90,6 +90,7 @@ public class LuceneIndexToolProviderImpl implements ILuceneIndexToolProvider {
             if(!queryParsers.containsKey(clazz)){
                 Analyzer analyzer = getAnalyzerFor(clazz);
                 QueryParser parser = new QueryParser(DEFAULT_QURERY_FIELD_NAME, analyzer);
+                parser.setAllowLeadingWildcard(true);
                 queryParsers.put(clazz, parser);
             }
             return queryParsers.get(clazz);
@@ -97,6 +98,7 @@ public class LuceneIndexToolProviderImpl implements ILuceneIndexToolProvider {
             if(!complexPhraseQueryParsers.containsKey(clazz)){
                 Analyzer analyzer = getAnalyzerFor(clazz);
                 QueryParser parser = new ComplexPhraseQueryParser(DEFAULT_QURERY_FIELD_NAME, analyzer);
+                parser.setAllowLeadingWildcard(true);
                 complexPhraseQueryParsers.put(clazz, parser);
             }
             return complexPhraseQueryParsers.get(clazz);
index 85f81162e65d1bf0d44506969e46903791f01fb6..8dc491b014ca9bf20b6f54493ffe27df63752f4f 100644 (file)
@@ -34,15 +34,11 @@ import org.apache.lucene.search.QueryWrapperFilter;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TopDocs;
-import org.apache.lucene.search.WildcardQuery;
 import org.apache.lucene.search.join.JoinUtil;
 import org.apache.lucene.search.join.ScoreMode;
 import org.hibernate.search.engine.ProjectionConstants;
-import org.hibernate.search.spatial.impl.Point;
 import org.hibernate.search.spatial.impl.Rectangle;
 
-import eu.etaxonomy.cdm.hibernate.search.DefinedTermBaseClassBridge;
-import eu.etaxonomy.cdm.hibernate.search.MultilanguageTextFieldBridge;
 import eu.etaxonomy.cdm.hibernate.search.NotNullAwareIdBridge;
 import eu.etaxonomy.cdm.model.common.CdmBase;
 import eu.etaxonomy.cdm.model.common.IdentifiableEntity;
@@ -97,6 +93,15 @@ public class QueryFactory {
      * Creates a new Term query. Depending on whether <code>isTextField</code> is set true or not the
      * supplied <code>queryString</code> will be parsed by using the according analyzer or not.
      * Setting <code>isTextField</code> to <code>false</code> is useful for searching for uuids etc.
+     * <p>
+     * The appropriate query type is determined by the query string:
+     * <ul>
+     * <li>Lactuca ==> TermQuery.class </li>
+     * <li>Lactuca perennis ==> BooleanQuery.class </li>
+     * <li>Lactu* ==> PrefixQuery.class</li>
+     * <li>"Lactuca perennis" ==> PhraseQuery.class</li>
+     * </ul>
+     *
      *
      * @param fieldName
      * @param queryString
@@ -112,12 +117,13 @@ public class QueryFactory {
         String luceneQueryString = fieldName + ":(" + queryString + ")";
         if (isTextField) {
             queryString = queryString.trim();
-            boolean isComplexPhraseQuery = queryString.matches("^\\\".*\\s+.*[\\*].*\\\"$");
+            // ^\"(.*\s+.*[\*].*|.*[\*].*\s+.*)\"$ matches phrase query strings with wildcards like '"Lactuca per*"'
+            boolean isComplexPhraseQuery = queryString.matches("^\\\"(.*\\s+.*[\\*].*|.*[\\*].*\\s+.*)\\\"$");
             textFieldNames.add(fieldName);
-            // in order to support the full query syntax we must use the parser
-            // here
+            // in order to support the full query syntax we must use the parser here
             try {
-                return toolProvider.getQueryParserFor(cdmBaseType, isComplexPhraseQuery).parse(luceneQueryString);
+                Query termQuery = toolProvider.getQueryParserFor(cdmBaseType, isComplexPhraseQuery).parse(luceneQueryString);
+                return termQuery;
             } catch (ParseException e) {
                 logger.error(e);
             }
index 783adc4339749a3e89041e5e33a0483405ec925f..f87a6a83a3112ec4f507238196afc55390e3b7bd 100644 (file)
@@ -25,28 +25,28 @@ import eu.etaxonomy.cdm.model.common.RelationshipTermBase;
  */
 public class AbstractRelationshipEdge<T extends RelationshipTermBase> {
 
-    private Set<T> taxonRelationshipTypes;
+    private Set<T> relationshipTypes;
     private EnumSet<Direction> directions;
 
-    public AbstractRelationshipEdge(T taxonRelationshipType, Direction ... direction) {
+    public AbstractRelationshipEdge(T relationshipType, Direction ... direction) {
         super();
-        this.taxonRelationshipTypes = new HashSet<>();
-        this.taxonRelationshipTypes.add(taxonRelationshipType);
+        this.relationshipTypes = new HashSet<>();
+        this.relationshipTypes.add(relationshipType);
         directions = EnumSet.copyOf(Arrays.asList(direction));
     }
 
-    public AbstractRelationshipEdge(Set<T> taxonRelationshipTypes, Direction ... direction) {
+    public AbstractRelationshipEdge(Set<T> relationshipTypes, Direction ... direction) {
         super();
-        this.taxonRelationshipTypes = taxonRelationshipTypes;
+        this.relationshipTypes = relationshipTypes;
         directions = EnumSet.copyOf(Arrays.asList(direction));
     }
 
-    public Set<T> getTaxonRelationshipTypes() {
-        return taxonRelationshipTypes;
+    public Set<T> getRelationshipTypes() {
+        return relationshipTypes;
     }
 
-    public void setTaxonRelationshipTypes(Set<T> taxonRelationshipTypes) {
-        this.taxonRelationshipTypes = taxonRelationshipTypes;
+    public void setRelationshipTypes(Set<T> relationshipTypes) {
+        this.relationshipTypes = relationshipTypes;
     }
 
     public EnumSet<Direction> getDirections() {
index 7dd3aa81768c5bdaea5e77705158b841109199b3..6f17ea1358a6b62da570839eeaac111dab4d613a 100644 (file)
@@ -151,9 +151,6 @@ public class ClassificationServiceImplTest extends CdmTransactionalIntegrationTe
     }
 
 
-    /**
-     * Test method for {@link eu.etaxonomy.cdm.api.service.ClassificationServiceImpl#loadRankSpecificRootNodes(eu.etaxonomy.cdm.model.taxon.Classification, eu.etaxonomy.cdm.model.name.Rank, java.util.List)}.
-     */
     @Test
     @DataSet
     public final void testListRankSpecificRootNodes(){
@@ -173,32 +170,32 @@ public class ClassificationServiceImplTest extends CdmTransactionalIntegrationTe
         //
         // for more historic Acacia taxonomy see http://lexikon.freenet.de/Akazien
 
-        List<TaxonNode> taxonNodes = service.listRankSpecificRootNodes(null, null, includeUnpublished, null, null, NODE_INIT_STRATEGY);
+        List<TaxonNode> taxonNodes = service.listRankSpecificRootNodes(null, null, null, includeUnpublished, null, null, NODE_INIT_STRATEGY);
         Assert.assertEquals(2, taxonNodes.size());
 
-        taxonNodes = service.listRankSpecificRootNodes(classification, null, includeUnpublished, null, null, NODE_INIT_STRATEGY);
+        taxonNodes = service.listRankSpecificRootNodes(classification, null, null, includeUnpublished, null, null, NODE_INIT_STRATEGY);
         Assert.assertEquals(2, taxonNodes.size());
 
-        taxonNodes = service.listRankSpecificRootNodes(classification, Rank.SECTION_BOTANY(), includeUnpublished, null, null, NODE_INIT_STRATEGY);
+        taxonNodes = service.listRankSpecificRootNodes(classification, null, Rank.SECTION_BOTANY(), includeUnpublished, null, null, NODE_INIT_STRATEGY);
         Assert.assertEquals(4, taxonNodes.size());
 
         // also test if the pager works
-        taxonNodes = service.listRankSpecificRootNodes(classification, Rank.SECTION_BOTANY(), includeUnpublished, 10, 0, NODE_INIT_STRATEGY);
+        taxonNodes = service.listRankSpecificRootNodes(classification, null, Rank.SECTION_BOTANY(), includeUnpublished, 10, 0, NODE_INIT_STRATEGY);
         Assert.assertEquals(4, taxonNodes.size());
-        taxonNodes = service.listRankSpecificRootNodes(classification, Rank.SECTION_BOTANY(), includeUnpublished, 2, 0, NODE_INIT_STRATEGY);
+        taxonNodes = service.listRankSpecificRootNodes(classification, null, Rank.SECTION_BOTANY(), includeUnpublished, 2, 0, NODE_INIT_STRATEGY);
         Assert.assertEquals(2, taxonNodes.size());
-        taxonNodes = service.listRankSpecificRootNodes(classification, Rank.SECTION_BOTANY(), includeUnpublished, 2, 1, NODE_INIT_STRATEGY);
+        taxonNodes = service.listRankSpecificRootNodes(classification, null, Rank.SECTION_BOTANY(), includeUnpublished, 2, 1, NODE_INIT_STRATEGY);
         Assert.assertEquals(2, taxonNodes.size());
-        taxonNodes = service.listRankSpecificRootNodes(classification, Rank.SECTION_BOTANY(), includeUnpublished, 2, 2, NODE_INIT_STRATEGY);
+        taxonNodes = service.listRankSpecificRootNodes(classification, null, Rank.SECTION_BOTANY(), includeUnpublished, 2, 2, NODE_INIT_STRATEGY);
         Assert.assertEquals(0, taxonNodes.size());
 
-        taxonNodes = service.listRankSpecificRootNodes(classification, Rank.SPECIES(), includeUnpublished, null, null, NODE_INIT_STRATEGY);
+        taxonNodes = service.listRankSpecificRootNodes(classification, null, Rank.SPECIES(), includeUnpublished, null, null, NODE_INIT_STRATEGY);
         Assert.assertEquals(3, taxonNodes.size());
 
         // also test if the pager works
-        taxonNodes = service.listRankSpecificRootNodes(classification, Rank.SPECIES(), includeUnpublished, 10, 0, NODE_INIT_STRATEGY);
+        taxonNodes = service.listRankSpecificRootNodes(classification, null, Rank.SPECIES(), includeUnpublished, 10, 0, NODE_INIT_STRATEGY);
         Assert.assertEquals(3, taxonNodes.size());
-        taxonNodes = service.listRankSpecificRootNodes(classification, Rank.SPECIES(), includeUnpublished, 2, 1, NODE_INIT_STRATEGY);
+        taxonNodes = service.listRankSpecificRootNodes(classification, null, Rank.SPECIES(), includeUnpublished, 2, 1, NODE_INIT_STRATEGY);
         Assert.assertEquals(1, taxonNodes.size());
 
 
index cc1d51ba94cbfaf9bb055b10b804138e97a31bcf..5997b5719b691401ff4d715008ea3557729105f6 100644 (file)
@@ -17,7 +17,6 @@ import java.util.List;
 import java.util.Set;
 import java.util.UUID;
 
-import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
 import org.junit.Assert;
 import org.junit.Ignore;
@@ -984,7 +983,7 @@ public class NameServiceImplTest extends CdmTransactionalIntegrationTest {
         List<Restriction<?>> restrictions;
         Pager<TaxonName> result;
 
-        Logger.getLogger("org.hibernate.SQL").setLevel(Level.TRACE);
+        //Logger.getLogger("org.hibernate.SQL").setLevel(Level.TRACE);
 
         restrictions = Arrays.asList(new Restriction<String>("typeDesignations.typeName.titleCache", Operator.AND, null, "Name1"));
         result = nameService.findByTitleWithRestrictions(null, "Name3", MatchMode.EXACT, restrictions, null, null, null, null);
index 2df33a14e02d8c452f10a1650b39e18f1e866ca1..a90622c748a08c609ddfd68a28738c22d29915fe 100644 (file)
@@ -91,12 +91,16 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
     private static final UUID ABIES_ALBA_UUID = UUID.fromString("7dbd5810-a3e5-44b6-b563-25152b8867f4");\r
     private static final UUID CLASSIFICATION_UUID = UUID.fromString("2a5ceebb-4830-4524-b330-78461bf8cb6b");\r
     private static final UUID CLASSIFICATION_ALT_UUID = UUID.fromString("d7c741e3-ae9e-4a7d-a566-9e3a7a0b51ce");\r
-    private static final UUID D_ABIES_BALSAMEA_UUID = UUID.fromString("900108d8-e6ce-495e-b32e-7aad3099135e");\r
-    private static final UUID D_ABIES_ALBA_UUID = UUID.fromString("ec8bba03-d993-4c85-8472-18b14942464b");\r
-    private static final UUID D_ABIES_KAWAKAMII_SEC_KOMAROV_UUID = UUID.fromString("e9d8c2fd-6409-46d5-9c2e-14a2bbb1b2b1");\r
     private static final UUID ABIES_SUBALPINA_UUID = UUID.fromString("9fee273c-c819-4f1f-913a-cd910465df51");\r
     private static final UUID ABIES_LASIOCARPA_UUID = UUID.fromString("9ce1fecf-c1ad-4127-be01-85d5d9f847ce");\r
 \r
+    private static final UUID ROOTNODE_CLASSIFICATION_5000 = UUID.fromString("a8266e45-091f-432f-87ae-c625e6aa9bbc");\r
+\r
+    private static final UUID DESC_ABIES_BALSAMEA_UUID = UUID.fromString("900108d8-e6ce-495e-b32e-7aad3099135e");\r
+    private static final UUID DESC_ABIES_ALBA_UUID = UUID.fromString("ec8bba03-d993-4c85-8472-18b14942464b");\r
+    private static final UUID DESC_ABIES_KAWAKAMII_SEC_KOMAROV_UUID = UUID.fromString("e9d8c2fd-6409-46d5-9c2e-14a2bbb1b2b1");\r
+\r
+\r
     private static final int NUM_OF_NEW_RADOM_ENTITIES = 1000;\r
 \r
     private boolean includeUnpublished = true;\r
@@ -116,10 +120,11 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
     @SpringBeanByType\r
     private INameService nameService;\r
     @SpringBeanByType\r
-    private ICdmMassIndexer indexer;\r
+    private ITaxonNodeService nodeService;\r
 \r
     @SpringBeanByType\r
-    private ITaxonNodeService nodeService;\r
+    private ICdmMassIndexer indexer;\r
+\r
 \r
     private static final int BENCHMARK_ROUNDS = 300;\r
 \r
@@ -158,21 +163,22 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
     public final void testPurgeAndReindex() throws IOException, LuceneParseException {\r
 \r
         refreshLuceneIndex();\r
+        TaxonNode subtree = null;\r
 \r
-        Pager<SearchResult<TaxonBase>> pager = taxonService.findByFullText(null, "Abies", null, includeUnpublished,\r
-                null, true, null, null, null, null); // --> 8\r
+        Pager<SearchResult<TaxonBase>> pager = taxonService.findByFullText(null, "Abies", null, subtree,\r
+                includeUnpublished, null, true, null, null, null, null); // --> 8\r
         Assert.assertEquals("Expecting 8 entities", 8, pager.getCount().intValue());\r
 \r
         indexer.purge(null);\r
         commitAndStartNewTransaction(null);\r
 \r
-        pager = taxonService.findByFullText(null, "Abies", null, includeUnpublished, null, true, null, null, null, null); // --> 0\r
+        pager = taxonService.findByFullText(null, "Abies", null, subtree, includeUnpublished, null, true, null, null, null, null); // --> 0\r
         Assert.assertEquals("Expecting no entities since the index has been purged", 0, pager.getCount().intValue());\r
 \r
         indexer.reindex(indexer.indexedClasses(), null);\r
         commitAndStartNewTransaction(null);\r
 \r
-        pager = taxonService.findByFullText(null, "Abies", null, includeUnpublished, null, true, null, null, null, null); // --> 8\r
+        pager = taxonService.findByFullText(null, "Abies", null, subtree, includeUnpublished, null, true, null, null, null, null); // --> 8\r
         Assert.assertEquals("Expecting 8 entities", 8, pager.getCount().intValue());\r
     }\r
 \r
@@ -183,24 +189,26 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
     public final void testFindByDescriptionElementFullText_CommonName() throws IOException,\r
             LuceneParseException {\r
 \r
+        TaxonNode subtree = null;\r
         refreshLuceneIndex();\r
 \r
-        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Wei"+UTF8.SHARP_S+"tanne", null, null, null,\r
+        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Wei"+UTF8.SHARP_S+"tanne",\r
+                null, subtree, null, null,\r
                 false, null, null, null, null);\r
         Assert.assertEquals("Expecting one entity when searching for CommonTaxonName", 1,\r
                 pager.getCount().intValue());\r
 \r
         // the description containing the Nulltanne has no taxon attached,\r
         // taxon.id = null\r
-        pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Nulltanne", null, null, null,\r
+        pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Nulltanne", null, subtree, null, null,\r
                 false, null, null, null, null);\r
         Assert.assertEquals("Expecting no entity when searching for 'Nulltanne' ", 0, pager.getCount().intValue());\r
 \r
-        pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Wei"+UTF8.SHARP_S+"tanne", null, null,\r
+        pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Wei"+UTF8.SHARP_S+"tanne", null, subtree, null,\r
                 Arrays.asList(new Language[] { Language.GERMAN() }), false, null, null, null, null);\r
         Assert.assertEquals("Expecting one entity when searching in German", 1, pager.getCount().intValue());\r
 \r
-        pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Wei"+UTF8.SHARP_S+"tanne", null, null,\r
+        pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Wei"+UTF8.SHARP_S+"tanne", null, subtree, null,\r
                 Arrays.asList(new Language[] { Language.RUSSIAN() }), false, null, null, null, null);\r
         Assert.assertEquals("Expecting no entity when searching in Russian", 0, pager.getCount().intValue());\r
 \r
@@ -212,12 +220,13 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
     public final void testFindByDescriptionElementFullText_Distribution() throws IOException, LuceneParseException {\r
 \r
         refreshLuceneIndex();\r
+        TaxonNode subtree = null;\r
 \r
         // by Area\r
-        Pager<SearchResult<TaxonBase>>  pager = taxonService.findByDescriptionElementFullText(null, "Canada", null, null, null, false, null, null, null, null);\r
+        Pager<SearchResult<TaxonBase>>  pager = taxonService.findByDescriptionElementFullText(null, "Canada", null, subtree, null, null, false, null, null, null, null);\r
         Assert.assertEquals("Expecting one entity when searching for arae 'Canada'", 1, pager.getCount().intValue());\r
         // by Status\r
-        pager = taxonService.findByDescriptionElementFullText(null, "present", null, null, null, false, null, null, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(null, "present", null, subtree, null, null, false, null, null, null, null);\r
         Assert.assertEquals("Expecting one entity when searching for status 'present'", 1, pager.getCount().intValue());\r
     }\r
 \r
@@ -227,8 +236,9 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
     public final void testFindByDescriptionElementFullText_wildcard() throws IOException, LuceneParseException {\r
 \r
         refreshLuceneIndex();\r
+        TaxonNode subtree = null;\r
 \r
-        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Wei"+UTF8.SHARP_S+"*", null, null, null, false, null, null, null, null);\r
+        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Wei"+UTF8.SHARP_S+"*", null, subtree, null, null, false, null, null, null, null);\r
         Assert.assertEquals("Expecting one entity when searching for CommonTaxonName", 1, pager.getCount().intValue());\r
     }\r
 \r
@@ -243,8 +253,10 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
     @DataSet\r
     public final void testFindByDescriptionElementFullText_TooManyClauses() throws IOException, LuceneParseException {\r
 \r
+        TaxonNode subtree = null;\r
+\r
         // generate 1024 terms to reproduce the bug\r
-        TaxonDescription description = (TaxonDescription) descriptionService.find(D_ABIES_ALBA_UUID);\r
+        TaxonDescription description = (TaxonDescription) descriptionService.find(DESC_ABIES_ALBA_UUID);\r
         Set<String> uniqueRandomStrs = new HashSet<>(1024);\r
         while(uniqueRandomStrs.size() < 1024){\r
             uniqueRandomStrs.add(RandomStringUtils.random(10, true, false));\r
@@ -257,7 +269,7 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
 \r
         refreshLuceneIndex();\r
 \r
-        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Rot*", null, null, null, false, null, null, null, null);\r
+        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Rot*", null, subtree, null, null, false, null, null, null, null);\r
         Assert.assertEquals("Expecting all 1024 entities grouped into one SearchResult item when searching for Rot*", 1, pager.getCount().intValue());\r
     }\r
 \r
@@ -272,6 +284,7 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
     @DataSet(loadStrategy=CleanSweepInsertLoadStrategy.class)\r
     public final void testFullText_Paging() throws IOException, LuceneParseException {\r
 \r
+        TaxonNode subtree = null;\r
         Reference sec = ReferenceFactory.newDatabase();\r
         referenceService.save(sec);\r
 \r
@@ -297,12 +310,12 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
 \r
         int pageSize = 10;\r
 \r
-        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Rot*", null, null, null, false, pageSize, null, null, null);\r
+        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Rot*", null, subtree, null, null, false, pageSize, null, null, null);\r
         Assert.assertEquals("unexpeted number of pages", Integer.valueOf(numOfItems / pageSize), pager.getPagesAvailable());\r
-        pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Rot*", null, null, null, false, pageSize, 9, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Rot*", null, subtree, null, null, false, pageSize, 9, null, null);\r
         Assert.assertNotNull("last page must have records", pager.getRecords());\r
         Assert.assertNotNull("last item on last page must exist", pager.getRecords().get(0));\r
-        pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Rot*", null, null, null, false, pageSize, 10, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Rot*", null, subtree, null, null, false, pageSize, 10, null, null);\r
         Assert.assertNotNull("last page + 1 must not have any records", pager.getRecords());\r
     }\r
 \r
@@ -321,6 +334,7 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
     @Ignore // test fails, maybe the assumptions made here are not compatible with the lucene scoring mechanism see http://lucene.apache.org/core/3_6_1/scoring.html\r
     public final void testFullText_ScoreAndOrder_1() throws IOException, LuceneParseException {\r
 \r
+        TaxonNode subtree = null;\r
         int numOfTaxa = 3;\r
 \r
         UUID[] taxonUuids = new UUID[numOfTaxa];\r
@@ -342,7 +356,7 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
         commitAndStartNewTransaction(null);\r
         refreshLuceneIndex();\r
 \r
-        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Rot", null, null, null, false, null, null, null, null);\r
+        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Rot", null, subtree, null, null, false, null, null, null, null);\r
         for(int i = 0; i < numOfTaxa; i++){\r
             Assert.assertEquals("taxa should be orderd by relevance (= score)", taxonUuids[numOfTaxa - i - 1], pager.getRecords().get(i).getEntity().getUuid());\r
         }\r
@@ -364,6 +378,7 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
     @Ignore // test fails, maybe the assumptions made here are not compatible with the lucene scoring mechanism see http://lucene.apache.org/core/3_6_1/scoring.html\r
     public final void testFullText_ScoreAndOrder_2() throws IOException, LuceneParseException {\r
 \r
+        TaxonNode subtree = null;\r
         int numOfTaxa = 3;\r
 \r
         UUID[] taxonUuids = new UUID[numOfTaxa];\r
@@ -385,7 +400,7 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
         commitAndStartNewTransaction(null);\r
         refreshLuceneIndex();\r
 \r
-        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Rot", null, null, null, false, null, null, null, null);\r
+        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Rot", null, subtree, null, null, false, null, null, null, null);\r
         for(int i = 0; i < numOfTaxa; i++){\r
             Assert.assertEquals("taxa should be orderd by relevance (= score)", taxonUuids[numOfTaxa - i - 1], pager.getRecords().get(i).getEntity().getUuid());\r
         }\r
@@ -402,7 +417,8 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
     @DataSet\r
     public final void testFullText_Grouping() throws IOException, LuceneParseException, LuceneMultiSearchException {\r
 \r
-        TaxonDescription description = (TaxonDescription) descriptionService.find(D_ABIES_ALBA_UUID);\r
+        TaxonNode subtree = null;\r
+        TaxonDescription description = (TaxonDescription) descriptionService.find(DESC_ABIES_ALBA_UUID);\r
         Set<String> uniqueRandomStrs = new HashSet<>(1024);\r
         int numOfItems = 100;\r
         while(uniqueRandomStrs.size() < numOfItems){\r
@@ -422,7 +438,7 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
          boolean highlightFragments = true;\r
 \r
         // test with findByDescriptionElementFullText\r
-        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Rot*", null, null, null, highlightFragments, pageSize, null, null, null);\r
+        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Rot*", null, subtree, null, null, highlightFragments, pageSize, null, null, null);\r
         logFreeTextSearchResults(pager, Level.DEBUG, null);\r
         Assert.assertEquals("All matches should be grouped into a single SearchResult element", 1, pager.getRecords().size());\r
         Assert.assertEquals("The count property of the pager must be set correctly", 1, pager.getCount().intValue());\r
@@ -432,7 +448,7 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
         Assert.assertEquals("expecting 10 highlighted fragments of field 'name'", maxDocsPerGroup, highlightMap.get("name").length);\r
 \r
         // test with findByEverythingFullText\r
-        pager = taxonService.findByEverythingFullText( "Rot*", null, includeUnpublished, null, highlightFragments, pageSize, null, null, null);\r
+        pager = taxonService.findByEverythingFullText( "Rot*", null, subtree, includeUnpublished, null, highlightFragments, pageSize, null, null, null);\r
         logFreeTextSearchResults(pager, Level.DEBUG, null);\r
         Assert.assertEquals("All matches should be grouped into a single SearchResult element", 1, pager.getRecords().size());\r
         Assert.assertEquals("The count property of the pager must be set correctly", 1, pager.getCount().intValue());\r
@@ -449,8 +465,9 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
     public final void testFindByDescriptionElementFullText_TextData() throws IOException, LuceneParseException {\r
 \r
         refreshLuceneIndex();\r
+        TaxonNode subtree = null;\r
 \r
-        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Abies", null, null, null, false, null, null, null, null);\r
+        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Abies", null, subtree, null, null, false, null, null, null, null);\r
         logFreeTextSearchResults(pager, Level.DEBUG, null);\r
         Assert.assertEquals("Expecting one entity when searching for any TextData", 1, pager.getCount().intValue());\r
         Assert.assertEquals("Abies balsamea sec. Kohlbecker, A., Testcase standart views, 2013", pager.getRecords().get(0).getEntity().getTitleCache());\r
@@ -458,33 +475,33 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
         Assert.assertEquals("Abies balsamea sec. Kohlbecker, A., Testcase standart views, 2013", pager.getRecords().get(0).getDocs().iterator().next().get("inDescription.taxon.titleCache"));\r
 \r
 \r
-        pager = taxonService.findByDescriptionElementFullText(null, "Abies", null, null, null, false, null, null, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(null, "Abies", null, subtree, null, null, false, null, null, null, null);\r
         Assert.assertEquals("Expecting one entity when searching for any type", 1, pager.getCount().intValue());\r
 \r
-        pager = taxonService.findByDescriptionElementFullText(null, "Abies", null, Arrays.asList(new Feature[]{Feature.UNKNOWN()}), null, false, null, null, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(null, "Abies", null, subtree, Arrays.asList(new Feature[]{Feature.UNKNOWN()}), null, false, null, null, null, null);\r
         Assert.assertEquals("Expecting one entity when searching for any type and for Feature DESCRIPTION", 1, pager.getCount().intValue());\r
 \r
-        pager = taxonService.findByDescriptionElementFullText(null, "Abies", null, Arrays.asList(new Feature[]{Feature.CHROMOSOME_NUMBER()}), null, false, null, null, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(null, "Abies", null, subtree, Arrays.asList(new Feature[]{Feature.CHROMOSOME_NUMBER()}), null, false, null, null, null, null);\r
         Assert.assertEquals("Expecting no entity when searching for any type and for Feature CHROMOSOME_NUMBER", 0, pager.getCount().intValue());\r
 \r
-        pager = taxonService.findByDescriptionElementFullText(null, "Abies", null, Arrays.asList(new Feature[]{Feature.CHROMOSOME_NUMBER(), Feature.UNKNOWN()}), null, false, null, null, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(null, "Abies", null, subtree, Arrays.asList(new Feature[]{Feature.CHROMOSOME_NUMBER(), Feature.UNKNOWN()}), null, false, null, null, null, null);\r
         Assert.assertEquals("Expecting no entity when searching for any type and for Feature DESCRIPTION or CHROMOSOME_NUMBER", 1, pager.getCount().intValue());\r
 \r
-        pager = taxonService.findByDescriptionElementFullText(Distribution.class, "Abies", null, null, null, false, null, null, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(Distribution.class, "Abies", null, subtree, null, null, false, null, null, null, null);\r
         Assert.assertEquals("Expecting no entity when searching for Distribution", 0, pager.getCount().intValue());\r
 \r
-        pager = taxonService.findByDescriptionElementFullText(TextData.class, "Бальзам", null, null, Arrays.asList(new Language[]{}), false, null, null, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(TextData.class, "Бальзам", null, subtree, null, Arrays.asList(new Language[]{}), false, null, null, null, null);\r
         Assert.assertEquals("Expecting one entity", 1, pager.getCount().intValue());\r
         Assert.assertEquals("Abies balsamea sec. Kohlbecker, A., Testcase standart views, 2013", pager.getRecords().get(0).getEntity().getTitleCache());\r
 \r
-        pager = taxonService.findByDescriptionElementFullText(TextData.class, "Бальзам", null, null, Arrays.asList(new Language[]{Language.RUSSIAN()}), false, null, null, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(TextData.class, "Бальзам", null, subtree, null, Arrays.asList(new Language[]{Language.RUSSIAN()}), false, null, null, null, null);\r
         Assert.assertEquals("Expecting one entity", 1, pager.getCount().intValue());\r
         Assert.assertEquals("Abies balsamea sec. Kohlbecker, A., Testcase standart views, 2013", pager.getRecords().get(0).getEntity().getTitleCache());\r
 \r
-        pager = taxonService.findByDescriptionElementFullText(TextData.class, "Бальзам", null, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(TextData.class, "Бальзам", null, subtree, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
         Assert.assertEquals("Expecting no entity", 0, pager.getCount().intValue());\r
 \r
-        pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne", null, null, Arrays.asList(new Language[]{Language.GERMAN(), Language.RUSSIAN()}), false, null, null, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne", null, subtree, null, Arrays.asList(new Language[]{Language.GERMAN(), Language.RUSSIAN()}), false, null, null, null, null);\r
         Assert.assertEquals("Expecting one entity", 1, pager.getCount().intValue());\r
         Assert.assertEquals("Abies balsamea sec. Kohlbecker, A., Testcase standart views, 2013", pager.getRecords().get(0).getEntity().getTitleCache());\r
     }\r
@@ -495,26 +512,27 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
     public final void testFindByDescriptionElementFullText_MultipleWords() throws IOException, LuceneParseException {\r
 \r
         refreshLuceneIndex();\r
+        TaxonNode subtree = null;\r
 \r
         // Pflanzenart aus der Gattung der Tannen\r
         long start = System.currentTimeMillis();\r
 \r
-        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Pflanzenart Tannen", null, null, null, false, null, null, null, null);\r
+        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Pflanzenart Tannen", null, subtree, null, null, false, null, null, null, null);\r
         Assert.assertEquals("OR search : Expecting one entity", 1, pager.getCount().intValue());\r
 \r
-        pager = taxonService.findByDescriptionElementFullText(TextData.class, "Pflanzenart Wespen", null, null, null, false, null, null, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(TextData.class, "Pflanzenart Wespen", null, subtree, null, null, false, null, null, null, null);\r
         Assert.assertEquals("OR search : Expecting one entity", 1, pager.getCount().intValue());\r
 \r
-        pager = taxonService.findByDescriptionElementFullText(TextData.class, "+Pflanzenart +Tannen", null, null, null, false, null, null, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(TextData.class, "+Pflanzenart +Tannen", null, subtree, null, null, false, null, null, null, null);\r
         Assert.assertEquals("AND search : Expecting one entity", 1, pager.getCount().intValue());\r
 \r
-        pager = taxonService.findByDescriptionElementFullText(TextData.class, "+Pflanzenart +Wespen", null, null, null, false, null, null, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(TextData.class, "+Pflanzenart +Wespen", null, subtree, null, null, false, null, null, null, null);\r
         Assert.assertEquals("AND search : Expecting no entity", 0, pager.getCount().intValue());\r
 \r
-        pager = taxonService.findByDescriptionElementFullText(TextData.class, "\"Pflanzenart aus der Gattung der Tannen\"", null, null, null, false, null, null, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(TextData.class, "\"Pflanzenart aus der Gattung der Tannen\"", null, subtree, null, null, false, null, null, null, null);\r
         Assert.assertEquals("Phrase search : Expecting one entity", 1, pager.getCount().intValue());\r
 \r
-        pager = taxonService.findByDescriptionElementFullText(TextData.class, "\"Pflanzenart aus der Gattung der Wespen\"", null, null, null, false, null, null, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(TextData.class, "\"Pflanzenart aus der Gattung der Wespen\"", null, subtree, null, null, false, null, null, null, null);\r
         Assert.assertEquals("Phrase search : Expecting one entity", 0, pager.getCount().intValue());\r
 \r
         logger.info("testFindByDescriptionElementFullText_MultipleWords() duration: " + (System.currentTimeMillis() - start) + "ms");\r
@@ -528,10 +546,11 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
     public final void testFindByDescriptionElementFullText_modify_DescriptionElement() throws IOException, LuceneParseException {\r
 \r
         refreshLuceneIndex();\r
+        TaxonNode subtree = null;\r
 \r
         //\r
         // modify the DescriptionElement\r
-        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne", null, null, Arrays.asList(new Language[]{Language.GERMAN(), Language.RUSSIAN()}), false, null, null, null, null);\r
+        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne", null, subtree, null, Arrays.asList(new Language[]{Language.GERMAN(), Language.RUSSIAN()}), false, null, null, null, null);\r
         Assert.assertTrue("Search did not return any results", pager.getRecords().size() > 0);\r
         Assert.assertTrue("Expecting only one doc", pager.getRecords().get(0).getDocs().size() == 1);\r
         Document indexDocument = pager.getRecords().get(0).getDocs().iterator().next();\r
@@ -550,11 +569,11 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
 //        );\r
 \r
         //\r
-        pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne", null, null, Arrays.asList(new Language[]{Language.GERMAN(), Language.RUSSIAN()}), false, null, null, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne", null, subtree, null, Arrays.asList(new Language[]{Language.GERMAN(), Language.RUSSIAN()}), false, null, null, null, null);\r
         Assert.assertEquals("The german 'Balsam-Tanne' TextData should no longer be indexed", 0, pager.getCount().intValue());\r
-        pager = taxonService.findByDescriptionElementFullText(TextData.class, "abeto", null, null, Arrays.asList(new Language[]{Language.SPANISH_CASTILIAN()}), false, null, null, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(TextData.class, "abeto", null, subtree, null, Arrays.asList(new Language[]{Language.SPANISH_CASTILIAN()}), false, null, null, null, null);\r
         Assert.assertEquals("expecting to find the SPANISH_CASTILIAN 'abeto bals"+UTF8.SMALL_A_ACUTE+"mico'", 1, pager.getCount().intValue());\r
-        pager = taxonService.findByDescriptionElementFullText(TextData.class, "bals"+UTF8.SMALL_A_ACUTE+"mico", null, null, null, false, null, null, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(TextData.class, "bals"+UTF8.SMALL_A_ACUTE+"mico", null, subtree, null, null, false, null, null, null, null);\r
         Assert.assertEquals("expecting to find the SPANISH_CASTILIAN 'abeto bals"+UTF8.SMALL_A_ACUTE+"mico'", 1, pager.getCount().intValue());\r
 \r
         //\r
@@ -569,9 +588,9 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
         }\r
         descriptionService.saveOrUpdate(description);\r
         commitAndStartNewTransaction(null);\r
-        pager = taxonService.findByDescriptionElementFullText(TextData.class, "abeto", null, null, Arrays.asList(new Language[]{Language.SPANISH_CASTILIAN()}), false, null, null, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(TextData.class, "abeto", null, subtree, null, Arrays.asList(new Language[]{Language.SPANISH_CASTILIAN()}), false, null, null, null, null);\r
         Assert.assertEquals("The spanish 'abeto bals"+UTF8.SMALL_A_ACUTE+"mico' TextData should no longer be indexed", 0, pager.getCount().intValue());\r
-        pager = taxonService.findByDescriptionElementFullText(TextData.class, "balsamiczna", null, null, Arrays.asList(new Language[]{Language.POLISH()}), false, null, null, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(TextData.class, "balsamiczna", null, subtree, null, Arrays.asList(new Language[]{Language.POLISH()}), false, null, null, null, null);\r
         Assert.assertEquals("expecting to find the POLISH 'Jod"+UTF8.POLISH_L+"a balsamiczna'", 1, pager.getCount().intValue());\r
     }\r
 \r
@@ -581,11 +600,13 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
     public final void testFindByDescriptionElementFullText_modify_Taxon() throws IOException, LuceneParseException {\r
 \r
         refreshLuceneIndex();\r
+        TaxonNode subtree = null;\r
 \r
         Taxon t_abies_balsamea = (Taxon)taxonService.find(ABIES_BALSAMEA_UUID);\r
-        TaxonDescription d_abies_balsamea = (TaxonDescription)descriptionService.find(D_ABIES_BALSAMEA_UUID);\r
+        TaxonDescription d_abies_balsamea = (TaxonDescription)descriptionService.find(DESC_ABIES_BALSAMEA_UUID);\r
 \r
-        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne", null, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
+        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne",\r
+                null, subtree, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
         Assert.assertEquals("expecting to find the GERMAN 'Balsam-Tanne'", 1, pager.getCount().intValue());\r
 \r
         // exchange the Taxon with another one via the Taxon object\r
@@ -597,7 +618,8 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
 \r
         t_abies_balsamea = (Taxon)taxonService.find(t_abies_balsamea.getUuid());\r
 \r
-        pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne", null, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne",\r
+                null, subtree, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
         Assert.assertEquals("'Balsam-Tanne' should no longer be found", 0, pager.getCount().intValue());\r
 \r
         // 2.) create new description and add to taxon:\r
@@ -620,7 +642,8 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
 //                "DESCRIPTIONBASE"\r
 //        });\r
 \r
-        pager = taxonService.findByDescriptionElementFullText(TextData.class, "mittelgro"+UTF8.SHARP_S+"er Baum", null, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(TextData.class, "mittelgro"+UTF8.SHARP_S+"er Baum",\r
+                null, subtree, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
         Assert.assertEquals("the taxon should be found via the new Description", 1, pager.getCount().intValue());\r
     }\r
 \r
@@ -630,6 +653,7 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
     public final void testFindByDescriptionElementFullText_modify_Classification() throws IOException, LuceneParseException {\r
 \r
         refreshLuceneIndex();\r
+        TaxonNode subtree = null;\r
 \r
         // put taxon into other classification, new taxon node\r
         Classification classification = classificationService.find(CLASSIFICATION_UUID);\r
@@ -638,9 +662,9 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
         // TODO: why is the test failing when the childNode is already retrieved here, and not after the following four lines?\r
         //TaxonNode childNode = classification.getChildNodes().iterator().next();\r
 \r
-        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne", null, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
+        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne", null, subtree, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
         Assert.assertEquals("expecting to find the GERMAN 'Balsam-Tanne' even if filtering by classification", 1, pager.getCount().intValue());\r
-        pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne", alternateClassification, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne", alternateClassification, subtree, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
         Assert.assertEquals("GERMAN 'Balsam-Tanne' should NOT be found in other classification", 0, pager.getCount().intValue());\r
 \r
         // check for the right taxon node\r
@@ -661,14 +685,16 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
 \r
         // reload classification\r
         classification = classificationService.find(CLASSIFICATION_UUID);\r
-        pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne", alternateClassification, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne",\r
+                alternateClassification, subtree, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
         Assert.assertEquals("GERMAN 'Balsam-Tanne' should now be found in other classification", 1, pager.getCount().intValue());\r
 \r
         classification.getChildNodes().clear();\r
         classificationService.saveOrUpdate(classification);\r
         commitAndStartNewTransaction(null);\r
 \r
-        pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne", classification, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne",\r
+                classification, subtree, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
         Assert.assertEquals("Now the GERMAN 'Balsam-Tanne' should NOT be found in original classification", 0, pager.getCount().intValue());\r
 \r
     }\r
@@ -678,8 +704,9 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
     @DataSet\r
     public final void testFindByDescriptionElementFullText_CategoricalData() throws IOException, LuceneParseException {\r
 \r
+        TaxonNode subtree = null;\r
         // add CategoricalData\r
-        DescriptionBase d_abies_balsamea = descriptionService.find(D_ABIES_BALSAMEA_UUID);\r
+        DescriptionBase d_abies_balsamea = descriptionService.find(DESC_ABIES_BALSAMEA_UUID);\r
         // Categorical data\r
         CategoricalData cdata = CategoricalData.NewInstance();\r
         cdata.setFeature(Feature.DESCRIPTION());\r
@@ -700,7 +727,7 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
 \r
         refreshLuceneIndex();\r
 \r
-        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(CategoricalData.class, "green", null, null, null, false, null, null, null, null);\r
+        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(CategoricalData.class, "green", null, subtree, null, null, false, null, null, null, null);\r
         Assert.assertEquals("Expecting one entity", 1, pager.getCount().intValue());\r
         Assert.assertEquals("Abies balsamea sec. Kohlbecker, A., Testcase standart views, 2013", pager.getRecords().get(0).getEntity().getTitleCache());\r
         Assert.assertTrue("Expecting only one doc", pager.getRecords().get(0).getDocs().size() == 1);\r
@@ -730,33 +757,34 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
     public final void testFindByDescriptionElementFullText_Highlighting() throws IOException, LuceneParseException {\r
 \r
         refreshLuceneIndex();\r
+        TaxonNode subtree = null;\r
 \r
-        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Abies", null, null, null, true, null, null, null, null);\r
+        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Abies", null, subtree, null, null, true, null, null, null, null);\r
         Assert.assertEquals("Expecting one entity when searching for any TextData", 1, pager.getCount().intValue());\r
         SearchResult<TaxonBase> searchResult = pager.getRecords().get(0);\r
         Assert.assertTrue("the map of highlighted fragments should contain at least one item", searchResult.getFieldHighlightMap().size() > 0);\r
         String[] fragments = searchResult.getFieldHighlightMap().values().iterator().next();\r
         Assert.assertTrue("first fragments should contains serch term", fragments[0].contains("<B>Abies</B>"));\r
 \r
-        pager = taxonService.findByDescriptionElementFullText(TextData.class, "Pflanzenart Tannen", null, null, null, true, null, null, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(TextData.class, "Pflanzenart Tannen", null, subtree, null, null, true, null, null, null, null);\r
         searchResult = pager.getRecords().get(0);\r
         Assert.assertTrue("Phrase search : Expecting at least one item in highlighted fragments", searchResult.getFieldHighlightMap().size() > 0);\r
         fragments = searchResult.getFieldHighlightMap().values().iterator().next();\r
         Assert.assertTrue("first fragments should contains serch term", fragments[0].contains("<B>Pflanzenart</B>") || fragments[0].contains("<B>Tannen</B>"));\r
 \r
-        pager = taxonService.findByDescriptionElementFullText(TextData.class, "+Pflanzenart +Tannen", null, null, null, true, null, null, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(TextData.class, "+Pflanzenart +Tannen", null, subtree, null, null, true, null, null, null, null);\r
         searchResult = pager.getRecords().get(0);\r
         Assert.assertTrue("Phrase search : Expecting at least one item in highlighted fragments", searchResult.getFieldHighlightMap().size() > 0);\r
         fragments = searchResult.getFieldHighlightMap().values().iterator().next();\r
         Assert.assertTrue("first fragments should contains serch term", fragments[0].contains("<B>Pflanzenart</B>") && fragments[0].contains("<B>Tannen</B>"));\r
 \r
-        pager = taxonService.findByDescriptionElementFullText(TextData.class, "\"Pflanzenart aus der Gattung der Tannen\"", null, null, null, true, null, null, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(TextData.class, "\"Pflanzenart aus der Gattung der Tannen\"", null, subtree, null, null, true, null, null, null, null);\r
         searchResult = pager.getRecords().get(0);\r
         Assert.assertTrue("Phrase search : Expecting at least one item in highlighted fragments", searchResult.getFieldHighlightMap().size() > 0);\r
         fragments = searchResult.getFieldHighlightMap().values().iterator().next();\r
         Assert.assertTrue("first fragments should contains serch term", fragments[0].contains("<B>Pflanzenart</B> <B>aus</B> <B>der</B> <B>Gattung</B> <B>der</B> <B>Tannen</B>"));\r
 \r
-        pager = taxonService.findByDescriptionElementFullText(TextData.class, "Gatt*", null, null, null, true, null, null, null, null);\r
+        pager = taxonService.findByDescriptionElementFullText(TextData.class, "Gatt*", null, subtree, null, null, true, null, null, null, null);\r
         searchResult = pager.getRecords().get(0);\r
         Assert.assertTrue("Wildcard search : Expecting at least one item in highlighted fragments", searchResult.getFieldHighlightMap().size() > 0);\r
         fragments = searchResult.getFieldHighlightMap().values().iterator().next();\r
@@ -771,15 +799,21 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
         refreshLuceneIndex();\r
 \r
         classificationService.find(CLASSIFICATION_UUID);\r
+        TaxonNode subtree = null;\r
 \r
         boolean NO_UNPUBLISHED = false;\r
 \r
-        Pager<SearchResult<TaxonBase>> pager = taxonService.findByFullText(null, "Abies", null, includeUnpublished, null, true, null, null, null, null); // --> 7\r
+        Pager<SearchResult<TaxonBase>> pager = taxonService.findByFullText(null, "Abies", null, subtree, includeUnpublished, null, true, null, null, null, null); // --> 7\r
 //        logFreeTextSearchResults(pager, Level.DEBUG, null);\r
         Assert.assertEquals("Expecting 8 entities", 8, pager.getCount().intValue());\r
 \r
+        //subtree\r
+        subtree = nodeService.find(ROOTNODE_CLASSIFICATION_5000);\r
+        pager = taxonService.findByFullText(null, "Abies", null, subtree, includeUnpublished, null, true, null, null, null, null); // --> 0\r
+        Assert.assertEquals("Expecting 2 entities", 2, pager.getCount().intValue());\r
+        subtree = null;\r
 \r
-        pager = taxonService.findByFullText(null, "Abies", null, NO_UNPUBLISHED, null, true, null, null, null, null); // --> 7\r
+        pager = taxonService.findByFullText(null, "Abies", null, subtree, NO_UNPUBLISHED, null, true, null, null, null, null); // --> 7\r
 //        logFreeTextSearchResults(pager, Level.DEBUG, null);\r
         Assert.assertEquals("Expecting 6 entities", 6, pager.getCount().intValue());\r
         Synonym abiesSubalpina = (Synonym)taxonService.find(ABIES_SUBALPINA_UUID);\r
@@ -787,41 +821,42 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
         //accepted published, syn not published\r
         abiesSubalpina.getAcceptedTaxon().setPublish(true);\r
         commitAndStartNewTransaction();\r
-        pager = taxonService.findByFullText(null, "Abies", null, NO_UNPUBLISHED, null, true, null, null, null, null); // --> 7\r
+        pager = taxonService.findByFullText(null, "Abies", null, subtree, NO_UNPUBLISHED, null, true, null, null, null, null); // --> 7\r
         Assert.assertEquals("Expecting 7 entities", 7, pager.getCount().intValue());\r
 \r
         //accepted published, syn published\r
         abiesSubalpina = (Synonym)taxonService.find(abiesSubalpina.getUuid());\r
         abiesSubalpina.setPublish(true);\r
         commitAndStartNewTransaction();\r
-        pager = taxonService.findByFullText(null, "Abies", null, NO_UNPUBLISHED, null, true, null, null, null, null); // --> 7\r
+        pager = taxonService.findByFullText(null, "Abies", null, subtree, NO_UNPUBLISHED, null, true, null, null, null, null); // --> 7\r
         Assert.assertEquals("Expecting 8 entities", 8, pager.getCount().intValue());\r
 \r
         //accepted not published, syn published\r
         abiesSubalpina = (Synonym)taxonService.find(abiesSubalpina.getUuid());\r
         abiesSubalpina.getAcceptedTaxon().setPublish(false);\r
         commitAndStartNewTransaction();\r
-        pager = taxonService.findByFullText(null, "Abies", null, NO_UNPUBLISHED, null, true, null, null, null, null); // --> 7\r
+        pager = taxonService.findByFullText(null, "Abies", null, subtree, NO_UNPUBLISHED, null, true, null, null, null, null); // --> 7\r
         Assert.assertEquals("Expecting 6 entities. Synonym and accepted should not be found, though synonym is published",\r
                 6, pager.getCount().intValue());\r
 \r
-        pager = taxonService.findByFullText(Taxon.class, "Abies", null, includeUnpublished, null, true, null, null, null, null); // --> 6\r
+        pager = taxonService.findByFullText(Taxon.class, "Abies", null, subtree, includeUnpublished, null, true, null, null, null, null); // --> 6\r
         Assert.assertEquals("Expecting 7 entities", 7, pager.getCount().intValue());\r
 \r
-        pager = taxonService.findByFullText(Synonym.class, "Abies", null, includeUnpublished, null, true, null, null, null, null); // --> 1\r
+        pager = taxonService.findByFullText(Synonym.class, "Abies", null, subtree, includeUnpublished, null, true, null, null, null, null); // --> 1\r
         Assert.assertEquals("Expecting 1 entity", 1, pager.getCount().intValue());\r
-        pager = taxonService.findByFullText(Synonym.class, "Abies", null, NO_UNPUBLISHED, null, true, null, null, null, null); // --> 1\r
+        pager = taxonService.findByFullText(Synonym.class, "Abies", null, subtree, NO_UNPUBLISHED, null, true, null, null, null, null); // --> 1\r
         Assert.assertEquals("Expecting 0 entity", 0, pager.getCount().intValue());\r
 \r
-        pager = taxonService.findByFullText(TaxonBase.class, "sec", null, includeUnpublished, null, true, null, null, null, null); // --> 7\r
+        pager = taxonService.findByFullText(TaxonBase.class, "sec", null, subtree, includeUnpublished, null, true, null, null, null, null); // --> 7\r
         Assert.assertEquals("Expecting 8 entities", 9, pager.getCount().intValue());\r
 \r
-        pager = taxonService.findByFullText(null, "genus", null, includeUnpublished, null, true, null, null, null, null); // --> 1\r
+        pager = taxonService.findByFullText(null, "genus", null, subtree, includeUnpublished, null, true, null, null, null, null); // --> 1\r
         Assert.assertEquals("Expecting 1 entity", 1, pager.getCount().intValue());\r
 \r
-        pager = taxonService.findByFullText(Taxon.class, "subalpina", null, includeUnpublished, null, true, null, null, null, null); // --> 0\r
+        pager = taxonService.findByFullText(Taxon.class, "subalpina", null, subtree, includeUnpublished, null, true, null, null, null, null); // --> 0\r
         Assert.assertEquals("Expecting 0 entities", 0, pager.getCount().intValue());\r
 \r
+\r
         // synonym in classification ???\r
     }\r
 \r
@@ -835,35 +870,84 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
         areaFilter.add(canada);\r
         areaFilter.add(russia);\r
 \r
-        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDistribution(areaFilter, statusFilter, null, 20, 0, null, null);\r
+        Pager<SearchResult<TaxonBase>> pager = taxonService.findByDistribution(areaFilter, statusFilter, null, null, 20, 0, null, null);\r
         Assert.assertEquals("Expecting 2 entities", Integer.valueOf(2), Integer.valueOf(pager.getRecords().size()));\r
 \r
     }\r
 \r
+    @Test\r
+    @DataSet\r
+    public final void testFindTaxaAndNamesByFullText_synonymClassificationSubtree() throws IOException, LuceneParseException, LuceneMultiSearchException {\r
+\r
+        refreshLuceneIndex();\r
+        Classification classification = null;\r
+        TaxonNode subtree = null;\r
+\r
+        // no classification / subtree filter: all matching taxa and synonyms, incl. unpublished\r
+        Pager<SearchResult<TaxonBase>> pager;\r
+        EnumSet<TaxaAndNamesSearchMode> taxaAndSynonyms = EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished);\r
+        pager = taxonService.findTaxaAndNamesByFullText(\r
+                taxaAndSynonyms, "Abies", classification, subtree, null, null, null, true, null, null, null, null);\r
+        Assert.assertEquals("doTaxa & doSynonyms & unpublished", 8, pager.getCount().intValue());\r
+\r
+        EnumSet<TaxaAndNamesSearchMode> taxaOnly = EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.includeUnpublished);\r
+        EnumSet<TaxaAndNamesSearchMode> synonymsOnly = EnumSet.of(TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished);\r
+\r
+        //classification filter\r
+        classification = classificationService.find(CLASSIFICATION_UUID);\r
+        pager = taxonService.findTaxaAndNamesByFullText(\r
+                taxaAndSynonyms, "Abies", classification, subtree, null, null, null, true, null, null, null, null);\r
+        Assert.assertEquals("doTaxa & doSynonyms & unpublished", 2, pager.getCount().intValue());\r
+        //taxa only\r
+        pager = taxonService.findTaxaAndNamesByFullText(\r
+                taxaOnly, "Abies", classification, subtree, null, null, null, true, null, null, null, null);\r
+        Assert.assertEquals("doTaxa & unpublished", 1, pager.getCount().intValue());\r
+        //synonyms only (was erroneously re-using taxaOnly, contradicting the assertion message)\r
+        pager = taxonService.findTaxaAndNamesByFullText(\r
+                synonymsOnly, "Abies", classification, subtree, null, null, null, true, null, null, null, null);\r
+        Assert.assertEquals("doSynonyms & unpublished", 1, pager.getCount().intValue());\r
+\r
+        classification = null;\r
+\r
+        //subtree filter\r
+        subtree = nodeService.find(ROOTNODE_CLASSIFICATION_5000);\r
+        pager = taxonService.findTaxaAndNamesByFullText(\r
+                taxaAndSynonyms, "Abies", classification, subtree, null, null, null, true, null, null, null, null);\r
+        Assert.assertEquals("doTaxa & doSynonyms & unpublished", 2, pager.getCount().intValue());\r
+        //taxa only\r
+        pager = taxonService.findTaxaAndNamesByFullText(\r
+                taxaOnly, "Abies", classification, subtree, null, null, null, true, null, null, null, null);\r
+        Assert.assertEquals("doTaxa & unpublished", 1, pager.getCount().intValue());\r
+        subtree = null;\r
+    }\r
+\r
     @Test\r
     @DataSet\r
     public final void testFindTaxaAndNamesByFullText() throws IOException, LuceneParseException, LuceneMultiSearchException {\r
 \r
         refreshLuceneIndex();\r
+        TaxonNode subtree = null;\r
 \r
         Classification alternateClassification = classificationService.find(CLASSIFICATION_ALT_UUID);\r
         Synonym abiesSubalpina = (Synonym)taxonService.find(ABIES_SUBALPINA_UUID);\r
 \r
         Pager<SearchResult<TaxonBase>> pager;\r
-         pager = taxonService.findTaxaAndNamesByFullText(\r
-                EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished),\r
-                "Abies", null, null, null, null, true, null, null, null, null);\r
-//        logPagerRecords(pager, Level.DEBUG);\r
+        EnumSet<TaxaAndNamesSearchMode> modes = EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished);\r
+        pager = taxonService.findTaxaAndNamesByFullText(\r
+                modes, "Abies", null, subtree, null, null, null, true, null, null, null, null);\r
         Assert.assertEquals("doTaxa & doSynonyms & unpublished", 8, pager.getCount().intValue());\r
+//      logPagerRecords(pager, Level.DEBUG);\r
+\r
+         //unpublished\r
         pager = taxonService.findTaxaAndNamesByFullText(TaxaAndNamesSearchMode.taxaAndSynonyms(),\r
-                "Abies", null, null, null, null, true, null, null, null, null);\r
+                "Abies", null, subtree, null, null, null, true, null, null, null, null);\r
         Assert.assertEquals("doTaxa & doSynonyms, published only", 6, pager.getCount().intValue());\r
 \r
         //accepted published, syn not published\r
         abiesSubalpina.getAcceptedTaxon().setPublish(true);\r
         commitAndStartNewTransaction();\r
         pager = taxonService.findTaxaAndNamesByFullText(TaxaAndNamesSearchMode.taxaAndSynonyms(),\r
-                "Abies", null, null, null, null, true, null, null, null, null);\r
+                "Abies", null, subtree, null, null, null, true, null, null, null, null);\r
         Assert.assertEquals("doTaxa & doSynonyms, accepted published", 7, pager.getCount().intValue());\r
 \r
         //accepted published, syn published\r
@@ -871,7 +955,7 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
         abiesSubalpina.setPublish(true);\r
         commitAndStartNewTransaction();\r
         pager = taxonService.findTaxaAndNamesByFullText(TaxaAndNamesSearchMode.taxaAndSynonyms(),\r
-                "Abies", null, null, null, null, true, null, null, null, null);\r
+                "Abies", null, subtree, null, null, null, true, null, null, null, null);\r
         Assert.assertEquals("Expecting 8 entities", 8, pager.getCount().intValue());\r
 \r
         //accepted not published, syn published\r
@@ -879,63 +963,63 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
         abiesSubalpina.getAcceptedTaxon().setPublish(false);\r
         commitAndStartNewTransaction();\r
         pager = taxonService.findTaxaAndNamesByFullText(TaxaAndNamesSearchMode.taxaAndSynonyms(),\r
-                "Abies", null, null, null, null, true, null, null, null, null);\r
+                "Abies", null, subtree, null, null, null, true, null, null, null, null);\r
         Assert.assertEquals("Expecting 6 entities. Synonym and accepted should not be found, though synonym is published",\r
                 6, pager.getCount().intValue());\r
 \r
         EnumSet<TaxaAndNamesSearchMode> searchMode = EnumSet.allOf(TaxaAndNamesSearchMode.class);\r
         pager = taxonService.findTaxaAndNamesByFullText(\r
-                searchMode, "Abies", null, null, null, null, true, null, null, null, null);\r
+                searchMode, "Abies", null, subtree, null, null, null, true, null, null, null, null);\r
 //        logPagerRecords(pager, Level.DEBUG);\r
         Assert.assertEquals("all search modes", 8, pager.getCount().intValue());\r
         searchMode.remove(TaxaAndNamesSearchMode.includeUnpublished);\r
         pager = taxonService.findTaxaAndNamesByFullText(\r
-                searchMode, "Abies", null, null, null, null, true, null, null, null, null);\r
+                searchMode, "Abies", null, subtree, null, null, null, true, null, null, null, null);\r
         Assert.assertEquals("all search modes except unpublished", 6, pager.getCount().intValue());\r
 \r
         pager = taxonService.findTaxaAndNamesByFullText(EnumSet.allOf(TaxaAndNamesSearchMode.class),\r
-                "Abies", alternateClassification, null, null, null, true, null, null, null, null);\r
+                "Abies", alternateClassification, subtree, null, null, null, true, null, null, null, null);\r
 //        logPagerRecords(pager, Level.DEBUG);\r
         Assert.assertEquals("all search modes, filtered by alternateClassification", 1, pager.getCount().intValue());\r
 \r
         pager = taxonService.findTaxaAndNamesByFullText(\r
                 EnumSet.of(TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished),\r
-                "Abies", null, null, null, null, true, null, null, null, null);\r
+                "Abies", null, subtree, null, null, null, true, null, null, null, null);\r
         Assert.assertEquals("Expecting 2 entity", 2, pager.getCount().intValue());\r
         Set<UUID> uuids = getTaxonUuidSet(pager);\r
         Assert.assertTrue("The real synonym should be contained", uuids.contains(ABIES_SUBALPINA_UUID));\r
         Assert.assertTrue("The pro parte synonym should be contained",uuids.contains(ABIES_LASIOCARPA_UUID));\r
         //without published\r
         pager = taxonService.findTaxaAndNamesByFullText(EnumSet.of(TaxaAndNamesSearchMode.doSynonyms),\r
-                "Abies", null, null, null, null, true, null, null, null, null);\r
+                "Abies", null, subtree, null, null, null, true, null, null, null, null);\r
         Assert.assertEquals("Expecting 0 entities", 0, pager.getCount().intValue());\r
 \r
 \r
         pager = taxonService.findTaxaAndNamesByFullText(\r
                 EnumSet.of(TaxaAndNamesSearchMode.doTaxaByCommonNames, TaxaAndNamesSearchMode.includeUnpublished),\r
-                "Abies", null, null, null, null, true, null, null, null, null);\r
+                "Abies", null, subtree, null, null, null, true, null, null, null, null);\r
         Assert.assertEquals("Expecting 0 entity", 0, pager.getCount().intValue());\r
 \r
         pager = taxonService.findTaxaAndNamesByFullText(\r
                 EnumSet.of(TaxaAndNamesSearchMode.doTaxaByCommonNames, TaxaAndNamesSearchMode.includeUnpublished),\r
-                "Tanne", null, null, null, null, true, null, null, null, null);\r
+                "Tanne", null, subtree, null, null, null, true, null, null, null, null);\r
         Assert.assertEquals("Expecting 1 entity", 1, pager.getRecords().size());\r
         Assert.assertEquals("Expecting 1 entity", 1, pager.getCount().intValue());\r
         pager = taxonService.findTaxaAndNamesByFullText(\r
                 EnumSet.of(TaxaAndNamesSearchMode.doTaxaByCommonNames),\r
-                "Tanne", null, null, null, null, true, null, null, null, null);\r
+                "Tanne", null, subtree, null, null, null, true, null, null, null, null);\r
         Assert.assertEquals("Expecting 0 entity", 0, pager.getRecords().size());\r
 \r
         //misapplied names\r
         pager = taxonService.findTaxaAndNamesByFullText(\r
                 EnumSet.of(TaxaAndNamesSearchMode.doMisappliedNames, TaxaAndNamesSearchMode.includeUnpublished),\r
-                "kawakamii", (Classification)null, null, null, null, true, null, null, null, null);\r
+                "kawakamii", (Classification)null, subtree, null, null, null, true, null, null, null, null);\r
         logFreeTextSearchResults(pager, Level.DEBUG, null);\r
         Assert.assertEquals("Expecting 1 entity", 1, pager.getCount().intValue());\r
         //unpublish accepted taxon\r
         pager = taxonService.findTaxaAndNamesByFullText(\r
                 EnumSet.of(TaxaAndNamesSearchMode.doMisappliedNames),\r
-                "kawakamii", (Classification)null, null, null, null, true, null, null, null, null);\r
+                "kawakamii", (Classification)null, subtree, null, null, null, true, null, null, null, null);\r
         Assert.assertEquals("Expecting 0 entities", 0, pager.getCount().intValue());\r
         //published accepted taxon/misapplied name\r
         Taxon abiesBalsamea = (Taxon)taxonService.find(ABIES_BALSAMEA_UUID);\r
@@ -943,36 +1027,89 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
         commitAndStartNewTransaction();\r
         pager = taxonService.findTaxaAndNamesByFullText(\r
                 EnumSet.of(TaxaAndNamesSearchMode.doMisappliedNames),\r
-                "kawakamii", (Classification)null, null, null, null, true, null, null, null, null);\r
+                "kawakamii", (Classification)null, subtree, null, null, null, true, null, null, null, null);\r
         Assert.assertEquals("Expecting 1 entities", 1, pager.getCount().intValue());\r
         //unpublished misapplied name\r
-        Taxon misapplied = (Taxon)taxonService.find(D_ABIES_KAWAKAMII_SEC_KOMAROV_UUID);\r
+        Taxon misapplied = (Taxon)taxonService.find(DESC_ABIES_KAWAKAMII_SEC_KOMAROV_UUID);\r
         misapplied.setPublish(false);\r
         commitAndStartNewTransaction();\r
         pager = taxonService.findTaxaAndNamesByFullText(\r
                 EnumSet.of(TaxaAndNamesSearchMode.doMisappliedNames),\r
-                "kawakamii", (Classification)null, null, null, null, true, null, null, null, null);\r
+                "kawakamii", (Classification)null, subtree, null, null, null, true, null, null, null, null);\r
         Assert.assertEquals("Expecting 0 entities", 0, pager.getCount().intValue());\r
 \r
     }\r
 \r
+    @Test\r
+    @DataSet\r
+    public final void testFindTaxaAndNamesByFullText_wildcard() throws IOException, LuceneParseException, LuceneMultiSearchException {\r
+\r
+        refreshLuceneIndex();\r
+        TaxonNode subtree = null;\r
+\r
+        Pager<SearchResult<TaxonBase>> pager;\r
+        //trailing wildcard\r
+        pager = taxonService.findTaxaAndNamesByFullText(TaxaAndNamesSearchMode.taxaAndSynonyms(),\r
+                "Abi*", null, subtree, null, null, null, true, null, null, null, null);\r
+//        logFreeTextSearchResults(pager, Level.DEBUG, null);\r
+        Assert.assertEquals("doTaxa & doSynonyms, published only", 6, pager.getCount().intValue());\r
+        //leading wildcard\r
+        pager = taxonService.findTaxaAndNamesByFullText(TaxaAndNamesSearchMode.taxaAndSynonyms(),\r
+                "*bies", null, subtree, null, null, null, true, null, null, null, null);\r
+        Assert.assertEquals("doTaxa & doSynonyms, published only", 6, pager.getCount().intValue());\r
+//        logFreeTextSearchResults(pager, Level.ERROR, null);\r
+        //leading single-character wildcard\r
+        pager = taxonService.findTaxaAndNamesByFullText(TaxaAndNamesSearchMode.taxaAndSynonyms(),\r
+                "?bies", null, subtree, null, null, null, true, null, null, null, null);\r
+        Assert.assertEquals("doTaxa & doSynonyms, published only", 6, pager.getCount().intValue());\r
+//        logFreeTextSearchResults(pager, Level.ERROR, null);\r
+        //match-all wildcard\r
+        pager = taxonService.findTaxaAndNamesByFullText(TaxaAndNamesSearchMode.taxaAndSynonyms(),\r
+                "*", null, subtree, null, null, null, true, null, null, null, null);\r
+        Assert.assertEquals("doTaxa & doSynonyms, published only", 7, pager.getCount().intValue());\r
+    }\r
+\r
+    @Test\r
+    @DataSet\r
+    // formerly @Ignore'd -- FIXME was: fails due to org.apache.lucene.queryparser.classic.ParseException: Cannot parse 'relatedFrom.titleCache:()': Encountered " ")" ") "" at line 1, column 24.\r
+    // TODO(review): method name has a typo; should read ..._empty_queryString\r
+    public final void testFindTaxaAndNamesByFullText_empty_querString() throws IOException, LuceneParseException, LuceneMultiSearchException {\r
+\r
+        refreshLuceneIndex();\r
+        TaxonNode subtree = null;\r
+\r
+        Pager<SearchResult<TaxonBase>> pager;\r
+        // empty query string: expected to match all published documents of the requested types\r
+        pager = taxonService.findTaxaAndNamesByFullText(EnumSet.of(TaxaAndNamesSearchMode.doTaxa),\r
+                "", null, subtree, null, null, null, true, null, null, null, null);\r
+        Assert.assertEquals("doTaxa, published only", 7, pager.getCount().intValue());\r
+\r
+        pager = taxonService.findTaxaAndNamesByFullText(TaxaAndNamesSearchMode.taxaAndSynonyms(),\r
+                "", null, subtree, null, null, null, true, null, null, null, null);\r
+        Assert.assertEquals("doTaxa & doSynonyms, published only", 7, pager.getCount().intValue());\r
+\r
+        // null query string: expected to behave like the empty string (same count of 7)\r
+        pager = taxonService.findTaxaAndNamesByFullText(EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doMisappliedNames),\r
+                null, null, subtree, null, null, null, true, null, null, null, null);\r
+        Assert.assertEquals("doTaxa & doMisappliedNames published only", 7, pager.getCount().intValue());\r
+\r
+        pager = taxonService.findTaxaAndNamesByFullText(EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doMisappliedNames, TaxaAndNamesSearchMode.doTaxaByCommonNames),\r
+                null, null, subtree, null, null, null, true, null, null, null, null);\r
+        Assert.assertEquals("doTaxa & doMisappliedNames & doTaxaByCommonNames , published only", 7, pager.getCount().intValue());\r
+        // logFreeTextSearchResults(pager, Level.ERROR, null);\r
+    }\r
+\r
     @Test\r
     @DataSet\r
     //test for https://dev.e-taxonomy.eu/redmine/issues/7486\r
     public final void testFindTaxaAndNamesByFullText_synonymsAndMisapplied_7486() throws IOException, LuceneParseException, LuceneMultiSearchException {\r
 \r
         refreshLuceneIndex();\r
+        TaxonNode subtree = null;\r
 \r
         //misapplied names\r
         Pager<SearchResult<TaxonBase>> pager = taxonService.findTaxaAndNamesByFullText(\r
                 EnumSet.of(TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.doMisappliedNames, TaxaAndNamesSearchMode.includeUnpublished),\r
-                "Abies", (Classification)null, null, null, null, true, null, null, null, null);\r
+                "Abies", (Classification)null, subtree, null, null, null, true, null, null, null, null);\r
         logFreeTextSearchResults(pager, Level.DEBUG, null);\r
         Assert.assertEquals("Expecting 3 entity", 3, pager.getCount().intValue());\r
         Set<UUID> uuids = getTaxonUuidSet(pager);\r
         Assert.assertTrue("The real synonym should be contained", uuids.contains(ABIES_SUBALPINA_UUID));\r
         Assert.assertTrue("The pro parte synonym should be contained",uuids.contains(ABIES_LASIOCARPA_UUID));\r
-        Assert.assertTrue("The misapplied name should be contained",uuids.contains(D_ABIES_KAWAKAMII_SEC_KOMAROV_UUID));\r
+        Assert.assertTrue("The misapplied name should be contained",uuids.contains(DESC_ABIES_KAWAKAMII_SEC_KOMAROV_UUID));\r
     }\r
 \r
     @Test\r
@@ -980,22 +1117,29 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
     public final void testFindTaxaAndNamesByFullText_PhraseQuery() throws IOException, LuceneParseException, LuceneMultiSearchException {\r
 \r
         refreshLuceneIndex();\r
+        TaxonNode subtree = null;\r
 \r
         Pager<SearchResult<TaxonBase>> pager = taxonService.findTaxaAndNamesByFullText(\r
                 EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished),\r
-                "\"Abies alba\"", null, null, null, null, true, null, null, null, null);\r
+                "\"Abies alba\"", null, subtree, null, null, null, true, null, null, null, null);\r
 //        logPagerRecords(pager, Level.DEBUG);\r
         Assert.assertEquals("doTaxa & doSynonyms with simple phrase query", 1, pager.getCount().intValue());\r
 \r
         pager = taxonService.findTaxaAndNamesByFullText(\r
                 EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished),\r
-                "\"Abies al*\"", null, null, null, null, true, null, null, null, null);\r
+                "\"Abies al*\"", null, subtree, null, null, null, true, null, null, null, null);\r
 //        logPagerRecords(pager, Level.DEBUG);\r
         Assert.assertEquals("doTaxa & doSynonyms with complex phrase query", 1, pager.getCount().intValue());\r
 \r
         pager = taxonService.findTaxaAndNamesByFullText(\r
                 EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished),\r
-                "\"Abies*\"", null, null, null, null, true, null, null, null, null);\r
+                "\"Abies*\"", null, subtree, null, null, null, true, null, null, null, null);\r
+//        logPagerRecords(pager, Level.DEBUG);\r
+        Assert.assertEquals("doTaxa & doSynonyms with simple phrase query", 8, pager.getCount().intValue());\r
+\r
+        pager = taxonService.findTaxaAndNamesByFullText(\r
+                EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished),\r
+                "\"Abies*\"", null, subtree, null, null, null, true, null, null, null, null);\r
 //        logPagerRecords(pager, Level.DEBUG);\r
         Assert.assertEquals("doTaxa & doSynonyms with simple phrase query", 8, pager.getCount().intValue());\r
 \r
@@ -1006,6 +1150,7 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
     public final void testFindTaxaAndNamesByFullText_Sort() throws IOException, LuceneParseException, LuceneMultiSearchException {\r
 \r
         refreshLuceneIndex();\r
+        TaxonNode subtree = null;\r
 \r
         List<OrderHint> orderHints = new ArrayList<>();\r
 \r
@@ -1015,7 +1160,7 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
         orderHints.addAll(OrderHint.ORDER_BY_ID.asList());\r
         Pager<SearchResult<TaxonBase>> pager = taxonService.findTaxaAndNamesByFullText(\r
                 EnumSet.of(TaxaAndNamesSearchMode.doTaxa),\r
-                "Abies", null, null, null, null, true, null, null, orderHints, null);\r
+                "Abies", null, subtree, null, null, null, true, null, null, orderHints, null);\r
 //        logSearchResults(pager, Level.DEBUG, docFields2log);\r
         int lastId = -1;\r
         for(SearchResult<TaxonBase> rs : pager.getRecords()){\r
@@ -1028,7 +1173,7 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
         orderHints.addAll(OrderHint.ORDER_BY_ID.asList());\r
         pager = taxonService.findTaxaAndNamesByFullText(\r
                 EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doSynonyms),\r
-                "Abies", null, null, null, null, true, null, null, orderHints, null);\r
+                "Abies", null, subtree, null, null, null, true, null, null, orderHints, null);\r
 //        logSearchResults(pager, Level.DEBUG, docFields2log);\r
 \r
         lastId = -1;\r
@@ -1044,7 +1189,7 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
         orderHints.addAll(OrderHint.NOMENCLATURAL_SORT_ORDER.asList());\r
         pager = taxonService.findTaxaAndNamesByFullText(\r
                 EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doSynonyms),\r
-                "Abies", null, null, null, null, true, null, null, orderHints, null);\r
+                "Abies", null, subtree, null, null, null, true, null, null, orderHints, null);\r
         logFreeTextSearchResults(pager, Level.DEBUG, null);\r
 \r
     }\r
@@ -1054,6 +1199,7 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
     @Ignore //ignore until #7487 is fixed\r
     public final void testFindTaxaAndNamesByFullText_AreaFilter_7487() throws IOException, LuceneParseException, LuceneMultiSearchException {\r
         refreshLuceneIndex();\r
+        TaxonNode subtree = null;\r
         Set<NamedArea> a_germany_canada_russia = new HashSet<>();\r
         a_germany_canada_russia.add(germany);\r
         a_germany_canada_russia.add(canada);\r
@@ -1065,7 +1211,7 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
 \r
         Pager<SearchResult<TaxonBase>> pager = taxonService.findTaxaAndNamesByFullText(\r
                 EnumSet.of(TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished),\r
-                "Abies", null, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
+                "Abies", null, subtree, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
         Assert.assertEquals("Synonyms with matching area filter", 2, pager.getCount().intValue());\r
         Set<UUID> uuids = this.getTaxonUuidSet(pager);\r
         Assert.assertTrue("Synonym of balsamea should be in", uuids.contains(ABIES_SUBALPINA_UUID));\r
@@ -1083,7 +1229,7 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
         //should give same results as above\r
         pager = taxonService.findTaxaAndNamesByFullText(\r
                 EnumSet.of(TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished),\r
-                "Abies", null, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
+                "Abies", null, subtree, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
 //        Assert.assertEquals("Synonyms with matching area filter", 2, pager.getCount().intValue());\r
 //        uuids = this.getTaxonUuidSet(pager);\r
 //        Assert.assertTrue("Synonym of balsamea should be in", uuids.contains(ABIES_SUBALPINA_UUID));\r
@@ -1092,16 +1238,16 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
         ///MISAPPLIED\r
         pager = taxonService.findTaxaAndNamesByFullText(\r
                 EnumSet.of(TaxaAndNamesSearchMode.doMisappliedNames, TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.includeUnpublished),\r
-                "Abies", null, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
+                "Abies", null, subtree, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
         Assert.assertEquals("misappliedNames with matching area & status filter", 3, pager.getCount().intValue());\r
         uuids = this.getTaxonUuidSet(pager);\r
-        Assert.assertTrue("Misapplied name should be in", uuids.contains(D_ABIES_KAWAKAMII_SEC_KOMAROV_UUID));\r
+        Assert.assertTrue("Misapplied name should be in", uuids.contains(DESC_ABIES_KAWAKAMII_SEC_KOMAROV_UUID));\r
 \r
         t_abies_balsamea = (Taxon)taxonService.find(ABIES_BALSAMEA_UUID);\r
         relsTo = t_abies_balsamea.getMisappliedNameRelations();\r
         Assert.assertEquals(1, relsTo.size());\r
         taxonRelation = relsTo.iterator().next();\r
-        Assert.assertEquals(taxonRelation.getFromTaxon().getUuid(), D_ABIES_KAWAKAMII_SEC_KOMAROV_UUID);\r
+        Assert.assertEquals(taxonRelation.getFromTaxon().getUuid(), DESC_ABIES_KAWAKAMII_SEC_KOMAROV_UUID);\r
         taxonRelation.setType(TaxonRelationshipType.PRO_PARTE_MISAPPLIED_NAME_FOR());\r
         taxonService.saveOrUpdate(t_abies_balsamea);\r
         commitAndStartNewTransaction(null);\r
@@ -1109,10 +1255,10 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
         //strange it works here before fixing #7487 already\r
         pager = taxonService.findTaxaAndNamesByFullText(\r
                 EnumSet.of(TaxaAndNamesSearchMode.doMisappliedNames, TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.includeUnpublished),\r
-                "Abies", null, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
+                "Abies", null, subtree, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
         Assert.assertEquals("misappliedNames with matching area & status filter", 3, pager.getCount().intValue());\r
         uuids = this.getTaxonUuidSet(pager);\r
-        Assert.assertTrue("Pro parte misapplied name should be in", uuids.contains(D_ABIES_KAWAKAMII_SEC_KOMAROV_UUID));\r
+        Assert.assertTrue("Pro parte misapplied name should be in", uuids.contains(DESC_ABIES_KAWAKAMII_SEC_KOMAROV_UUID));\r
 \r
     }\r
 \r
@@ -1122,6 +1268,7 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
     public final void testFindTaxaAndNamesByFullText_AreaFilter() throws IOException, LuceneParseException, LuceneMultiSearchException {\r
 \r
         refreshLuceneIndex();\r
+        TaxonNode subtree = null;\r
 \r
         Set<NamedArea> a_germany_canada_russia = new HashSet<>();\r
         a_germany_canada_russia.add(germany);\r
@@ -1143,7 +1290,7 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
 \r
         Pager<SearchResult<TaxonBase>> pager = taxonService.findTaxaAndNamesByFullText(\r
                 EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.includeUnpublished),\r
-                "Abies", null, a_germany_canada_russia, null, null, true, null, null, null, null);\r
+                "Abies", null, subtree, a_germany_canada_russia, null, null, true, null, null, null, null);\r
         logFreeTextSearchResults(pager, Level.DEBUG, null);\r
         Assert.assertEquals("Synonyms with matching area filter", 2, pager.getCount().intValue());\r
         Set<UUID> uuids = this.getTaxonUuidSet(pager);\r
@@ -1152,7 +1299,7 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
 \r
         pager = taxonService.findTaxaAndNamesByFullText(\r
                 EnumSet.of(TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished),\r
-                "Abies", null, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
+                "Abies", null, subtree, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
         Assert.assertEquals("Synonyms with matching area filter", 2, pager.getCount().intValue());\r
         uuids = this.getTaxonUuidSet(pager);\r
         Assert.assertTrue("Synonym of balsamea should be in", uuids.contains(ABIES_SUBALPINA_UUID));\r
@@ -1160,7 +1307,7 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
 \r
         pager = taxonService.findTaxaAndNamesByFullText(\r
                 EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished),\r
-                "Abies", null, a_germany_canada_russia, null, null, true, null, null, null, null);\r
+                "Abies", null, subtree, a_germany_canada_russia, null, null, true, null, null, null, null);\r
         logFreeTextSearchResults(pager, Level.DEBUG, null);\r
         Assert.assertEquals("taxa and synonyms with matching area filter", 4, pager.getCount().intValue());\r
         uuids = this.getTaxonUuidSet(pager);\r
@@ -1168,11 +1315,11 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
         Assert.assertTrue("Accepted taxon with area should be in", uuids.contains(ABIES_BALSAMEA_UUID));\r
         Assert.assertTrue("Synonym of balsamea should be in", uuids.contains(ABIES_SUBALPINA_UUID));\r
         Assert.assertTrue("Pro parte synonym of balsamea should be in", uuids.contains(ABIES_LASIOCARPA_UUID));\r
-        Assert.assertFalse("Misapplied name should NOT be in", uuids.contains(D_ABIES_KAWAKAMII_SEC_KOMAROV_UUID));\r
+        Assert.assertFalse("Misapplied name should NOT be in", uuids.contains(DESC_ABIES_KAWAKAMII_SEC_KOMAROV_UUID));\r
 \r
         pager = taxonService.findTaxaAndNamesByFullText(\r
                 EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished),\r
-                "Abies", null, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
+                "Abies", null, subtree, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
         Assert.assertEquals("taxa and synonyms with matching area & status filter 4", 4, pager.getCount().intValue());\r
         uuids = this.getTaxonUuidSet(pager);\r
         Assert.assertTrue("Synonym of balsamea should be in", uuids.contains(ABIES_SUBALPINA_UUID));\r
@@ -1182,7 +1329,7 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
 \r
         pager = taxonService.findTaxaAndNamesByFullText(\r
                 EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished),\r
-                "Abies", null, a_germany_canada_russia, present, null, true, null, null, null, null);\r
+                "Abies", null, subtree, a_germany_canada_russia, present, null, true, null, null, null, null);\r
         Assert.assertEquals("taxa and synonyms with matching area & status filter 3", 3, pager.getCount().intValue());\r
         uuids = this.getTaxonUuidSet(pager);\r
         Assert.assertTrue("Abies balsamea (accepted taxon) should be in", uuids.contains(ABIES_BALSAMEA_UUID));\r
@@ -1191,12 +1338,12 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
 \r
         pager = taxonService.findTaxaAndNamesByFullText(\r
                 EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished),\r
-                "Abies", null, a_russia, present, null, true, null, null, null, null);\r
+                "Abies", null, subtree, a_russia, present, null, true, null, null, null, null);\r
         Assert.assertEquals("taxa and synonyms with non matching area & status filter", 0, pager.getCount().intValue());\r
 \r
         pager = taxonService.findTaxaAndNamesByFullText(\r
                 EnumSet.of(TaxaAndNamesSearchMode.doTaxaByCommonNames, TaxaAndNamesSearchMode.includeUnpublished),\r
-                "Tanne", null, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
+                "Tanne", null, subtree, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
         Assert.assertEquals("ByCommonNames with area filter", 1, pager.getCount().intValue());\r
         uuids = this.getTaxonUuidSet(pager);\r
         Assert.assertTrue("Abies balsamea should be in", uuids.contains(ABIES_BALSAMEA_UUID));\r
@@ -1204,10 +1351,10 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
         // abies_kawakamii_sensu_komarov as misapplied name for t_abies_balsamea\r
         pager = taxonService.findTaxaAndNamesByFullText(\r
                 EnumSet.of(TaxaAndNamesSearchMode.doMisappliedNames, TaxaAndNamesSearchMode.includeUnpublished),\r
-                "Abies", null, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
+                "Abies", null, subtree, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
         Assert.assertEquals("misappliedNames with matching area & status filter", 1, pager.getCount().intValue());\r
         uuids = this.getTaxonUuidSet(pager);\r
-        Assert.assertTrue("Misapplied name should  be in", uuids.contains(D_ABIES_KAWAKAMII_SEC_KOMAROV_UUID));\r
+        Assert.assertTrue("Misapplied name should  be in", uuids.contains(DESC_ABIES_KAWAKAMII_SEC_KOMAROV_UUID));\r
 \r
 \r
         // 1. remove existing taxon relation\r
@@ -1221,11 +1368,11 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
 \r
         pager = taxonService.findTaxaAndNamesByFullText(\r
                 EnumSet.of(TaxaAndNamesSearchMode.doMisappliedNames, TaxaAndNamesSearchMode.includeUnpublished),\r
-                "Abies", null, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
+                "Abies", null, subtree, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
         Assert.assertEquals("misappliedNames with matching area & status filter, should match nothing now", 0, pager.getCount().intValue());\r
 \r
         // 2. now add abies_kawakamii_sensu_komarov as misapplied name for t_abies_alba and search for misapplications in Russia: ABSENT\r
-        Taxon t_abies_kawakamii_sensu_komarov = (Taxon)taxonService.find(D_ABIES_KAWAKAMII_SEC_KOMAROV_UUID);\r
+        Taxon t_abies_kawakamii_sensu_komarov = (Taxon)taxonService.find(DESC_ABIES_KAWAKAMII_SEC_KOMAROV_UUID);\r
         Taxon t_abies_alba = (Taxon)taxonService.find(ABIES_ALBA_UUID);\r
         t_abies_alba.addMisappliedName(t_abies_kawakamii_sensu_komarov, null, null);\r
         taxonService.update(t_abies_kawakamii_sensu_komarov);\r
@@ -1233,10 +1380,10 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
 \r
         pager = taxonService.findTaxaAndNamesByFullText(\r
                 EnumSet.of(TaxaAndNamesSearchMode.doMisappliedNames, TaxaAndNamesSearchMode.includeUnpublished),\r
-                "Abies", null, a_germany_canada_russia, absent, null, true, null, null, null, null);\r
+                "Abies", null, subtree, a_germany_canada_russia, absent, null, true, null, null, null, null);\r
         Assert.assertEquals("misappliedNames with matching area & status filter, should find one", 1, pager.getCount().intValue());\r
         uuids = this.getTaxonUuidSet(pager);\r
-        Assert.assertTrue("Misapplied name should  be in", uuids.contains(D_ABIES_KAWAKAMII_SEC_KOMAROV_UUID));\r
+        Assert.assertTrue("Misapplied name should  be in", uuids.contains(DESC_ABIES_KAWAKAMII_SEC_KOMAROV_UUID));\r
 \r
     }\r
 \r
@@ -1245,6 +1392,7 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
     //http://dev.e-taxonomy.eu/trac/ticket/5477\r
     public final void testFindTaxaAndNamesByFullText_AreaFilter_issue5477() throws IOException, LuceneParseException, LuceneMultiSearchException {\r
 \r
+        TaxonNode subtree = null;\r
         Set<NamedArea> a_germany_canada_russia = new HashSet<>();\r
         a_germany_canada_russia.add(germany);\r
         a_germany_canada_russia.add(canada);\r
@@ -1254,7 +1402,7 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
         Set<PresenceAbsenceTerm> absent = new HashSet<>();\r
         absent.add(PresenceAbsenceTerm.ABSENT());\r
 \r
-        Taxon t_abies_kawakamii_sensu_komarov = (Taxon)taxonService.find(D_ABIES_KAWAKAMII_SEC_KOMAROV_UUID);\r
+        Taxon t_abies_kawakamii_sensu_komarov = (Taxon)taxonService.find(DESC_ABIES_KAWAKAMII_SEC_KOMAROV_UUID);\r
         Taxon t_abies_alba = (Taxon)taxonService.find(ABIES_ALBA_UUID);\r
         t_abies_alba.addMisappliedName(t_abies_kawakamii_sensu_komarov, null, null);\r
 \r
@@ -1270,7 +1418,7 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
 \r
         Pager<SearchResult<TaxonBase>> pager = taxonService.findTaxaAndNamesByFullText(\r
                   EnumSet.of(TaxaAndNamesSearchMode.doMisappliedNames),\r
-                  "Abies", null, a_germany_canada_russia, absent, null, true, null, null, null, null);\r
+                  "Abies", null, subtree, a_germany_canada_russia, absent, null, true, null, null, null, null);\r
         Assert.assertEquals("misappliedNames with matching area & status filter, should find one", 1, pager.getCount().intValue());\r
     }\r
 \r
@@ -1287,16 +1435,22 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
     public final void testFindByEverythingFullText() throws IOException, LuceneParseException, LuceneMultiSearchException {\r
 \r
         refreshLuceneIndex();\r
-\r
+        TaxonNode subtree = null;\r
+        EnumSet<TaxaAndNamesSearchMode> mode = TaxaAndNamesSearchMode.taxaAndSynonymsWithUnpublished();\r
         // via Taxon\r
-        Pager<SearchResult<TaxonBase>>pager = taxonService.findByEverythingFullText("Abies", null, includeUnpublished, null, true, null, null, null, null);\r
+        Pager<SearchResult<TaxonBase>>pager = taxonService.findByEverythingFullText("Abies", null, subtree, includeUnpublished, null, true, null, null, null, null);\r
+//        Pager<SearchResult<TaxonBase>> pager = taxonService.findTaxaAndNamesByFullText(mode,\r
+//                "Abies", null, null, null, null, true, null, null, null, null);\r
         logFreeTextSearchResults(pager, Level.DEBUG, null);\r
         Assert.assertTrue("Expecting at least 7 entities for 'Abies'", pager.getCount() > 7);\r
         Assert.assertNotNull("Expecting entity", pager.getRecords().get(0).getEntity());\r
-        Assert.assertEquals("Expecting Taxon entity", Taxon.class, pager.getRecords().get(0).getEntity().getClass());\r
+//        Assert.assertEquals("Expecting Taxon entity", Taxon.class, pager.getRecords().get(0).getEntity().getClass());\r
 \r
         // via DescriptionElement\r
-        pager = taxonService.findByEverythingFullText("present", null, includeUnpublished, null, true, null, null, null, null);\r
+        pager = taxonService.findByEverythingFullText("present", null, subtree, includeUnpublished, null, true, null, null, null, null);\r
+        //this is not covered by findTaxaAndNamesByFullText\r
+//        pager = taxonService.findTaxaAndNamesByFullText(mode,\r
+//                "present", null, null, null, null, true, null, null, null, null);\r
         Assert.assertEquals("Expecting one entity when searching for area 'present'", 1, pager.getCount().intValue());\r
         Assert.assertNotNull("Expecting entity", pager.getRecords().get(0).getEntity());\r
         Assert.assertEquals("Expecting Taxon entity", Taxon.class, CdmBase.deproxy(pager.getRecords().get(0).getEntity()).getClass());\r
@@ -1310,16 +1464,27 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
     public final void findByEveryThingFullText() throws IOException, LuceneParseException, LuceneMultiSearchException {\r
 \r
         refreshLuceneIndex();\r
+        TaxonNode subtree = null;\r
 \r
-        Pager<SearchResult<TaxonBase>> pager = taxonService.findByEverythingFullText("genus", null, includeUnpublished, null, false, null, null, null, null); // --> 1\r
+        Classification classification = null;\r
+        EnumSet<TaxaAndNamesSearchMode> mode = TaxaAndNamesSearchMode.taxaAndSynonymsWithUnpublished();\r
+\r
+        Pager<SearchResult<TaxonBase>> pager = taxonService.findByEverythingFullText("genus", null, subtree, includeUnpublished, null, false, null, null, null, null); // --> 1\r
+//        Pager<SearchResult<TaxonBase>> pager = taxonService.findTaxaAndNamesByFullText(mode,\r
+//                "genus", classification, null, null, null, false, null, null, null, null);\r
         Assert.assertEquals("Expecting 1 entity", 1, pager.getCount().intValue());\r
 \r
         //FIXME FAILS: abies balamea is returned twice, see also testFullText_Grouping()\r
-        pager = taxonService.findByEverythingFullText("Balsam", null, includeUnpublished, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
+        pager = taxonService.findByEverythingFullText("Balsam", null, subtree, includeUnpublished, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
         logFreeTextSearchResults(pager, Level.DEBUG, null);\r
+//        pager = taxonService.findTaxaAndNamesByFullText(EnumSet.allOf(TaxaAndNamesSearchMode.class),\r
+//                "Balsam", classification, null, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
         Assert.assertEquals("expecting to find the Abies balsamea via the GERMAN DescriptionElements", 1, pager.getCount().intValue());\r
 \r
-        pager = taxonService.findByEverythingFullText("Abies", null, includeUnpublished, null, true, null, null, null, null);\r
+        //TODO fieldHighlight does not yet work\r
+        pager = taxonService.findByEverythingFullText("Abies", null, subtree, includeUnpublished, null, true, null, null, null, null);\r
+//        pager = taxonService.findTaxaAndNamesByFullText(mode,\r
+//                "Abies", classification, null, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
         Assert.assertEquals("Expecting 8 entities", 8, pager.getCount().intValue());\r
         SearchResult<TaxonBase> searchResult = pager.getRecords().get(0);\r
         Assert.assertTrue("the map of highlighted fragments should contain at least one item", searchResult.getFieldHighlightMap().size() > 0);\r
@@ -1389,7 +1554,7 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
     @Test\r
     @DataSet\r
     public final void benchmarkFindByCommonNameLucene() throws IOException, LuceneParseException {\r
-\r
+        TaxonNode subtree = null;\r
         createRandomTaxonWithCommonName(NUM_OF_NEW_RADOM_ENTITIES);\r
 \r
         refreshLuceneIndex();\r
@@ -1398,7 +1563,7 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
 \r
         long startMillis = System.currentTimeMillis();\r
         for (int indx = 0; indx < BENCHMARK_ROUNDS; indx++) {\r
-            pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Wei"+UTF8.SHARP_S+"*", null, null, null, false, null, null, null, null);\r
+            pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Wei"+UTF8.SHARP_S+"*", null, subtree, null, null, false, null, null, null, null);\r
             if (logger.isDebugEnabled()) {\r
                 logger.debug("[" + indx + "]" + pager.getRecords().get(0).getEntity().getTitleCache());\r
             }\r
@@ -1487,8 +1652,8 @@ public class TaxonServiceSearchTest extends CdmTransactionalIntegrationTest {
         TaxonDescription d_abies_alba = TaxonDescription.NewInstance(t_abies_alba);\r
         TaxonDescription d_abies_balsamea = TaxonDescription.NewInstance(t_abies_balsamea);\r
 \r
-        d_abies_alba.setUuid(D_ABIES_ALBA_UUID);\r
-        d_abies_balsamea.setUuid(D_ABIES_BALSAMEA_UUID);\r
+        d_abies_alba.setUuid(DESC_ABIES_ALBA_UUID);\r
+        d_abies_balsamea.setUuid(DESC_ABIES_BALSAMEA_UUID);\r
 \r
 \r
         // CommonTaxonName\r
diff --git a/cdmlib-services/src/test/java/eu/etaxonomy/cdm/api/service/TypeDesignationSetManagerTest.java b/cdmlib-services/src/test/java/eu/etaxonomy/cdm/api/service/TypeDesignationSetManagerTest.java
new file mode 100755 (executable)
index 0000000..c4b597f
--- /dev/null
@@ -0,0 +1,261 @@
+/**
+* Copyright (C) 2018 EDIT
+* European Distributed Institute of Taxonomy
+* http://www.e-taxonomy.eu
+*
+* The contents of this file are subject to the Mozilla Public License Version 1.1
+* See LICENSE.TXT at the top of this package for the full license terms.
+*/
+package eu.etaxonomy.cdm.api.service;
+
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+import java.io.FileNotFoundException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.log4j.Logger;
+import org.junit.Before;
+import org.junit.Test;
+
+import eu.etaxonomy.cdm.api.service.exception.RegistrationValidationException;
+import eu.etaxonomy.cdm.api.service.name.TypeDesignationSetManager;
+import eu.etaxonomy.cdm.api.service.name.TypeDesignationSetManager.TypeDesignationWorkingSet;
+import eu.etaxonomy.cdm.model.common.IdentifiableSource;
+import eu.etaxonomy.cdm.model.media.Media;
+import eu.etaxonomy.cdm.model.name.NameTypeDesignation;
+import eu.etaxonomy.cdm.model.name.Rank;
+import eu.etaxonomy.cdm.model.name.SpecimenTypeDesignation;
+import eu.etaxonomy.cdm.model.name.SpecimenTypeDesignationStatus;
+import eu.etaxonomy.cdm.model.name.TaxonName;
+import eu.etaxonomy.cdm.model.name.TaxonNameFactory;
+import eu.etaxonomy.cdm.model.name.TypeDesignationBase;
+import eu.etaxonomy.cdm.model.name.TypeDesignationStatusBase;
+import eu.etaxonomy.cdm.model.occurrence.DerivationEvent;
+import eu.etaxonomy.cdm.model.occurrence.DerivedUnit;
+import eu.etaxonomy.cdm.model.occurrence.FieldUnit;
+import eu.etaxonomy.cdm.model.occurrence.MediaSpecimen;
+import eu.etaxonomy.cdm.model.occurrence.SpecimenOrObservationType;
+import eu.etaxonomy.cdm.model.reference.Reference;
+import eu.etaxonomy.cdm.model.reference.ReferenceFactory;
+import eu.etaxonomy.cdm.ref.TypedEntityReference;
+import eu.etaxonomy.cdm.test.integration.CdmTransactionalIntegrationTest;
+/**
+ * @author a.kohlbecker, k.luther
+ * @since 03.09.2018
+ *
+ */
+public class TypeDesignationSetManagerTest extends CdmTransactionalIntegrationTest{
+
+        private NameTypeDesignation ntd;
+        private SpecimenTypeDesignation std_IT;
+        private SpecimenTypeDesignation std_HT;
+        private SpecimenTypeDesignation std_IT_2;
+        private SpecimenTypeDesignation std_IT_3;
+        private SpecimenTypeDesignation mtd_HT_published;
+        private SpecimenTypeDesignation mtd_IT_unpublished;
+
+        @Before
+        public void init(){
+
+            ntd = NameTypeDesignation.NewInstance();
+            ntd.setId(1);
+            TaxonName typeName = TaxonNameFactory.NewBacterialInstance(Rank.SPECIES());
+            typeName.setTitleCache("Prionus L.", true);
+            ntd.setTypeName(typeName);
+            Reference citation = ReferenceFactory.newGeneric();
+            citation.setTitleCache("Species Plantarum", true);
+            ntd.setCitation(citation);
+
+            FieldUnit fu_1 = FieldUnit.NewInstance();
+            fu_1.setId(1);
+            fu_1.setTitleCache("Testland, near Bughausen, A.Kohlbecker 81989, 2017", true);
+
+            FieldUnit fu_2 = FieldUnit.NewInstance();
+            fu_2.setId(2);
+            fu_2.setTitleCache("Dreamland, near Kissingen, A.Kohlbecker 66211, 2017", true);
+
+            std_HT = SpecimenTypeDesignation.NewInstance();
+            std_HT.setId(1);
+            DerivedUnit specimen_HT = DerivedUnit.NewInstance(SpecimenOrObservationType.PreservedSpecimen);
+            specimen_HT.setTitleCache("OHA", true);
+            createDerivationEvent(fu_1, specimen_HT);
+            specimen_HT.getOriginals().add(fu_1);
+            std_HT.setTypeSpecimen(specimen_HT);
+            std_HT.setTypeStatus(SpecimenTypeDesignationStatus.HOLOTYPE());
+
+            std_IT = SpecimenTypeDesignation.NewInstance();
+            std_IT.setId(2);
+            DerivedUnit specimen_IT = DerivedUnit.NewInstance(SpecimenOrObservationType.PreservedSpecimen);
+            specimen_IT.setTitleCache("BER", true);
+            createDerivationEvent(fu_1, specimen_IT);
+            std_IT.setTypeSpecimen(specimen_IT);
+            std_IT.setTypeStatus(SpecimenTypeDesignationStatus.ISOTYPE());
+
+            std_IT_2 = SpecimenTypeDesignation.NewInstance();
+            std_IT_2.setId(3);
+            DerivedUnit specimen_IT_2 = DerivedUnit.NewInstance(SpecimenOrObservationType.PreservedSpecimen);
+            specimen_IT_2.setTitleCache("KEW", true);
+            createDerivationEvent(fu_1, specimen_IT_2);
+            std_IT_2.setTypeSpecimen(specimen_IT_2);
+            std_IT_2.setTypeStatus(SpecimenTypeDesignationStatus.ISOTYPE());
+
+            std_IT_3 = SpecimenTypeDesignation.NewInstance();
+            std_IT_3.setId(4);
+            DerivedUnit specimen_IT_3 = DerivedUnit.NewInstance(SpecimenOrObservationType.PreservedSpecimen);
+            specimen_IT_3.setTitleCache("M", true);
+            createDerivationEvent(fu_2, specimen_IT_3);
+            std_IT_3.setTypeSpecimen(specimen_IT_3);
+            std_IT_3.setTypeStatus(SpecimenTypeDesignationStatus.ISOTYPE());
+
+            mtd_HT_published = SpecimenTypeDesignation.NewInstance();
+            mtd_HT_published.setId(5);
+            MediaSpecimen mediaSpecimen_published = (MediaSpecimen)DerivedUnit.NewInstance(SpecimenOrObservationType.Media);
+            Media media = Media.NewInstance();
+            Reference ref = ReferenceFactory.newGeneric();
+            ref.setTitleCache("A.K. & W.K (2008) Algae of the BGBM", true);
+            media.addSource(IdentifiableSource.NewPrimaryMediaSourceInstance(ref, "p.33"));
+            mediaSpecimen_published.setMediaSpecimen(media);
+            createDerivationEvent(fu_1, mediaSpecimen_published);
+            mtd_HT_published.setTypeSpecimen(mediaSpecimen_published);
+            mtd_HT_published.setTypeStatus(SpecimenTypeDesignationStatus.HOLOTYPE());
+
+            mtd_IT_unpublished = SpecimenTypeDesignation.NewInstance();
+            mtd_IT_unpublished.setId(6);
+            MediaSpecimen mediaSpecimen_unpublished = (MediaSpecimen)DerivedUnit.NewInstance(SpecimenOrObservationType.Media);
+            eu.etaxonomy.cdm.model.occurrence.Collection collection = eu.etaxonomy.cdm.model.occurrence.Collection.NewInstance();
+            collection.setCode("B");
+            mediaSpecimen_unpublished.setCollection(collection);
+            mediaSpecimen_unpublished.setAccessionNumber("Slide A565656");
+            createDerivationEvent(fu_1, mediaSpecimen_unpublished);
+            mtd_IT_unpublished.setTypeSpecimen(mediaSpecimen_unpublished);
+            mtd_IT_unpublished.setTypeStatus(SpecimenTypeDesignationStatus.ISOTYPE());
+
+        }
+
+        /**
+         * @param fu_1
+         * @param specimen_IT_2
+         */
+        protected void createDerivationEvent(FieldUnit fu_1, DerivedUnit specimen_IT_2) {
+            DerivationEvent derivationEvent_3 = DerivationEvent.NewInstance();
+            derivationEvent_3.addOriginal(fu_1);
+            derivationEvent_3.addDerivative(specimen_IT_2);
+        }
+
+        @Test
+        public void test1() throws RegistrationValidationException{
+
+            List<TypeDesignationBase> tds = new ArrayList<>();
+            tds.add(ntd);
+            tds.add(std_IT);
+            tds.add(std_HT);
+            tds.add(std_IT_2);
+            tds.add(std_IT_3);
+
+            TaxonName typifiedName = TaxonNameFactory.NewBacterialInstance(Rank.SPECIES());
+            typifiedName.setTitleCache("Prionus coriatius L.", true);
+
+            typifiedName.addTypeDesignation(ntd, false);
+            typifiedName.addTypeDesignation(std_HT, false);
+            typifiedName.addTypeDesignation(std_IT, false);
+            typifiedName.addTypeDesignation(std_IT_2, false);
+            typifiedName.addTypeDesignation(std_IT_3, false);
+
+            TypeDesignationSetManager typeDesignationManager = new TypeDesignationSetManager(tds);
+            String result = typeDesignationManager.print();
+
+            Logger.getLogger(this.getClass()).debug(result);
+            assertNotNull(result);
+            assertEquals(
+                    "Prionus coriatius L. Type: Dreamland, near Kissingen, A.Kohlbecker 66211, 2017 Isotype, M; Type: Testland, near Bughausen, A.Kohlbecker 81989, 2017 Holotype, OHA; Isotypes: BER, KEW; NameType: Prionus L. Species Plantarum"
+                    , result
+                    );
+
+            LinkedHashMap<TypedEntityReference, TypeDesignationWorkingSet> orderedTypeDesignations =
+                    typeDesignationManager.getOrderdTypeDesignationWorkingSets();
+            Iterator<TypeDesignationWorkingSet> byStatusMapIterator = orderedTypeDesignations.values().iterator();
+            Map<TypeDesignationStatusBase<?>, Collection<TypedEntityReference>> byStatusMap_1 = byStatusMapIterator.next();
+            Map<TypeDesignationStatusBase<?>, Collection<TypedEntityReference>> byStatusMap_2 = byStatusMapIterator.next();
+            Iterator<TypeDesignationStatusBase<?>> keyIt_1 = byStatusMap_1.keySet().iterator();
+            assertEquals("Isotype", keyIt_1.next().getLabel());
+            Iterator<TypeDesignationStatusBase<?>> keyIt_2 = byStatusMap_2.keySet().iterator();
+            assertEquals("Holotype", keyIt_2.next().getLabel());
+            assertEquals("Isotype", keyIt_2.next().getLabel());
+        }
+
+        @Test
+        public void test2() throws RegistrationValidationException{
+
+            TaxonName typifiedName = TaxonNameFactory.NewBacterialInstance(Rank.SPECIES());
+            typifiedName.setTitleCache("Prionus coriatius L.", true);
+
+            TypeDesignationSetManager typeDesignationManager = new TypeDesignationSetManager(typifiedName);
+            String result = typeDesignationManager.print();
+            Logger.getLogger(this.getClass()).debug(result);
+            assertNotNull(result);
+            assertEquals(
+                    "Prionus coriatius L."
+                    , result
+                    );
+
+            typifiedName.addTypeDesignation(ntd, false);
+            typeDesignationManager.addTypeDesigations(null, ntd);
+
+            assertEquals(
+                    "Prionus coriatius L. NameType: Prionus L. Species Plantarum"
+                    , typeDesignationManager.print()
+                    );
+
+            typifiedName.addTypeDesignation(std_HT, false);
+            typeDesignationManager.addTypeDesigations(null, std_HT);
+
+            assertEquals(
+                    "Prionus coriatius L. Type: Testland, near Bughausen, A.Kohlbecker 81989, 2017 Holotype, OHA; NameType: Prionus L. Species Plantarum"
+                    , typeDesignationManager.print()
+                    );
+
+        }
+
+        @Test
+        public void test_mediaType(){
+
+            for(int i = 0; i < 10; i++ ){
+
+                init();
+                // repeat 10 times to assure the order of typedesignations is fix in the representations
+                TaxonName typifiedName = TaxonNameFactory.NewBacterialInstance(Rank.SPECIES());
+                typifiedName.setTitleCache("Prionus coriatius L.", true);
+                typifiedName.addTypeDesignation(mtd_HT_published, false);
+                typifiedName.addTypeDesignation(mtd_IT_unpublished, false);
+
+                TypeDesignationSetManager typeDesignationManager = new TypeDesignationSetManager(typifiedName);
+                typeDesignationManager.addTypeDesigations(null, mtd_HT_published);
+                typeDesignationManager.addTypeDesigations(null, mtd_IT_unpublished);
+
+                assertEquals("failed after repreating " + i + " times",
+                        "Prionus coriatius L. Type: Testland, near Bughausen, A.Kohlbecker 81989, 2017 Holotype, [icon] p.33 in A.K. & W.K (2008) Algae of the BGBM; Isotype, [icon] (B Slide A565656)."
+                        , typeDesignationManager.print()
+                        );
+            }
+
+        }
+
+        /**
+         * {@inheritDoc}
+         */
+        @Override
+        public void createTestDataSet() throws FileNotFoundException {
+            // TODO Auto-generated method stub
+
+        }
+
+
+
+}
index 22991d7945281abb63e3669b00731b2fb376b754..bfa2c63a5670f303b8e7408157551cc01398fb8d 100644 (file)
@@ -15,7 +15,7 @@ import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import eu.etaxonomy.cdm.api.service.dto.TaxonRelationshipsDTO.TaxonRelation;
+import eu.etaxonomy.cdm.api.service.dto.TaxonRelationshipsDTO.TaxonRelationDTO;
 import eu.etaxonomy.cdm.format.taxon.TaxonRelationshipFormatter;
 import eu.etaxonomy.cdm.model.agent.Person;
 import eu.etaxonomy.cdm.model.common.DefaultTermInitializer;
@@ -134,9 +134,9 @@ public class TaxonRelationshipsDTOTest {
 
         dto.addRelation(taxonRel, Direction.relatedFrom, languages);
         dto.addRelation(rel2, Direction.relatedFrom, languages);
-        TaxonRelation relToDuplicate = dto.addRelation(rel3, Direction.relatedFrom, languages);
+        TaxonRelationDTO relToDuplicate = dto.addRelation(rel3, Direction.relatedFrom, languages);
         dto.addRelation(rel4, Direction.relatedFrom, languages);
-        TaxonRelation duplicateWithoutRelSec2 = dto.addRelation(rel5, Direction.relatedFrom, languages);
+        TaxonRelationDTO duplicateWithoutRelSec2 = dto.addRelation(rel5, Direction.relatedFrom, languages);
 
         dto.createMisapplicationString();
 
index 06422b939939f507dc4b4c8797ec49dbc689abc0..aca1b94caa25e1d30498e39710bf027658facbe7 100644 (file)
@@ -45,9 +45,20 @@ public class QueryFactoryTest extends CdmIntegrationTest {
     public void testNewTermQuery_textfield_complex(){
 
         QueryFactory qf = new QueryFactory(luceneIndexToolProvider, Taxon.class);
+        Assert.assertEquals("ComplexPhraseQuery", qf.newTermQuery("titleCache", "\"Lact* *ennis\"", true).getClass().getSimpleName());
         Assert.assertEquals("ComplexPhraseQuery", qf.newTermQuery("titleCache", "\"Lactuca per*\"", true).getClass().getSimpleName());
+        Assert.assertEquals("ComplexPhraseQuery", qf.newTermQuery("titleCache", "\"Lact* perennis\"", true).getClass().getSimpleName());
+        Assert.assertEquals("ComplexPhraseQuery", qf.newTermQuery("titleCache", "\"Lact* per*\"", true).getClass().getSimpleName());
+        Assert.assertEquals("ComplexPhraseQuery", qf.newTermQuery("titleCache", "\"Lac*ca per*\"", true).getClass().getSimpleName());
+        Assert.assertEquals("ComplexPhraseQuery", qf.newTermQuery("titleCache", "\"Lac*ca perennis\"", true).getClass().getSimpleName());
+        Assert.assertEquals("ComplexPhraseQuery", qf.newTermQuery("titleCache", "\"Lactuca p*ennis\"", true).getClass().getSimpleName());
+        Assert.assertEquals("ComplexPhraseQuery", qf.newTermQuery("titleCache", "\"*ctuca perennis\"", true).getClass().getSimpleName());
+        Assert.assertEquals("ComplexPhraseQuery", qf.newTermQuery("titleCache", "\"*ctu* perennis\"", true).getClass().getSimpleName());
+        Assert.assertEquals("ComplexPhraseQuery", qf.newTermQuery("titleCache", "\"*ctu* *enn*\"", true).getClass().getSimpleName());
+        Assert.assertEquals("ComplexPhraseQuery", qf.newTermQuery("titleCache", "\"*ctuca per*\"", true).getClass().getSimpleName());
     }
 
+
     /**
      * {@inheritDoc}
      */
index a6f935e3fa1853f616822a0d456043dc5b1c3d87..3279aed9d0069eeb63c611666c43f681df501029 100644 (file)
@@ -17,7 +17,6 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.UUID;
 
-import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
 import org.hibernate.Query;
 import org.hibernate.Session;
@@ -422,7 +421,7 @@ public class Datasource {
                        UUID uuid2 = botName2.getUuid();
                        try {
                                Logger loggerTrace = Logger.getLogger("org.hibernate.type");
-                               loggerTrace.setLevel(Level.TRACE);
+                               //loggerTrace.setLevel(Level.TRACE);
                                System.out.println(logger.getName());
 
                                appCtr.getNameService().save(botName1);
index 4f94cd41b18fa42d8767638320ca2e952650de48..202cb429bedbc6e5becdd1a4494fb0d43a2e9059 100644 (file)
@@ -1,11 +1,16 @@
 <?xml version='1.0' encoding='UTF-8'?>
 <dataset>
+  
+  <CLASSIFICATION ID="5000" UUID="2a5ceebb-4830-4524-b330-78461bf8cb6b" ROOTNODE_ID="5000" PROTECTEDTITLECACHE="false" TITLECACHE="European Abies"    MICROREFERENCE="[null]" NAME_ID="5000"/>
+  <CLASSIFICATION ID="5001" UUID="d7c741e3-ae9e-4a7d-a566-9e3a7a0b51ce" ROOTNODE_ID="5001" PROTECTEDTITLECACHE="false" TITLECACHE="Abies alternative" MICROREFERENCE="[null]" NAME_ID="5001"/>
+  <CLASSIFICATION_AUD/>
     
   <TAXONNODE ID="5000" UUID="a8266e45-091f-432f-87ae-c625e6aa9bbc" TREEINDEX="#t5000#5000#" SORTINDEX="[null]" EXCLUDED="FALSE" UNPLACED="FALSE" COUNTCHILDREN="1" CLASSIFICATION_ID="5000" PARENT_ID="[null]" TAXON_ID="[null]"/>
-  <TAXONNODE ID="5001" UUID="1ff4255d-7c6c-4d01-aaae-7acc2cd3dda1" TREEINDEX="#t5001#5001#" SORTINDEX="[null]" EXCLUDED="FALSE" UNPLACED="FALSE" COUNTCHILDREN="1" CLASSIFICATION_ID="5001" PARENT_ID="[null]" TAXON_ID="[null]"/>
   <TAXONNODE ID="5002" UUID="bf379dec-349a-4b95-bb02-1d6bf785983b" TREEINDEX="#t5000#5000#5002#" SORTINDEX="0" EXCLUDED="FALSE" UNPLACED="FALSE" COUNTCHILDREN="0" CLASSIFICATION_ID="5000" PARENT_ID="5000" TAXON_ID="5003"/>
-  <TAXONNODE ID="5003" UUID="54f12949-9229-416c-9246-7bbc4d0f77a5" TREEINDEX="#t5001#5001#5003#" SORTINDEX="0" EXCLUDED="FALSE" UNPLACED="FALSE" COUNTCHILDREN="0" CLASSIFICATION_ID="5001" PARENT_ID="5001" TAXON_ID="5007"/>
-  
+  <TAXONNODE ID="5001" UUID="1ff4255d-7c6c-4d01-aaae-7acc2cd3dda1" TREEINDEX="#t5001#5001#" SORTINDEX="[null]" EXCLUDED="FALSE" UNPLACED="FALSE" COUNTCHILDREN="1" CLASSIFICATION_ID="5001" PARENT_ID="[null]" TAXON_ID="[null]"/>
+  <TAXONNODE ID="5003" UUID="54f12949-9229-416c-9246-7bbc4d0f77a5" TREEINDEX="#t5001#5001#5003#" SORTINDEX="0" EXCLUDED="FALSE" UNPLACED="FALSE" COUNTCHILDREN="1" CLASSIFICATION_ID="5001" PARENT_ID="5001" TAXON_ID="5007"/>
+  <!--  TAXONNODE ID="5004" UUID="55f12949-9229-416c-9246-7bbc4d0f77a5" TREEINDEX="#t5001#5001#5003#5004#" SORTINDEX="0" EXCLUDED="FALSE" UNPLACED="FALSE" COUNTCHILDREN="0" CLASSIFICATION_ID="5001" PARENT_ID="5003" TAXON_ID="5003"/>
+   -->
   <TAXONBASE DTYPE="Taxon"   ID="5000" UUID="3e72d306-0f83-4d4f-be84-6f85a604a2be" PROTECTEDTITLECACHE="false" TITLECACHE="Abies sec. Kohlbecker, A., Testcase standart views, 2013"            DOUBTFUL="false" publish="true"  USENAMECACHE="false" TAXONSTATUSUNKNOWN="false"  NAME_ID="5000" SEC_ID="5000"/>
   <TAXONBASE DTYPE="Taxon"   ID="5001" UUID="7dbd5810-a3e5-44b6-b563-25152b8867f4" PROTECTEDTITLECACHE="false" TITLECACHE="Abies alba sec. Kohlbecker, A., Testcase standart views, 2013"       DOUBTFUL="false" publish="true"  USENAMECACHE="false" TAXONSTATUSUNKNOWN="false"  NAME_ID="5001" SEC_ID="5000"/>
   <TAXONBASE DTYPE="Synonym" ID="5002" UUID="9fee273c-c819-4f1f-913a-cd910465df51" PROTECTEDTITLECACHE="false" TITLECACHE="Abies subalpina sec. Kohlbecker, A., Testcase standart views, 2013"  DOUBTFUL="false" publish="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="[null]" NAME_ID="5002" SEC_ID="5000" ACCEPTEDTAXON_ID="5003" TYPE_ID="848"/>
@@ -59,9 +64,6 @@
   <HOMOTYPICALGROUP ID="5004" UUID="9db11d08-706d-48da-bbf4-2fc74b106ad8" />
   <HOMOTYPICALGROUP ID="5005" UUID="63d7447a-2778-4224-8535-abbbf2d7b55c" />
   <HOMOTYPICALGROUP ID="5006" UUID="053bb99d-4679-483b-a7a1-7ecd2656a7db" />
-  <CLASSIFICATION ID="5000" UUID="2a5ceebb-4830-4524-b330-78461bf8cb6b" ROOTNODE_ID="5000" PROTECTEDTITLECACHE="false" TITLECACHE="European Abies"    MICROREFERENCE="[null]" NAME_ID="5000"/>
-  <CLASSIFICATION ID="5001" UUID="d7c741e3-ae9e-4a7d-a566-9e3a7a0b51ce" ROOTNODE_ID="5001" PROTECTEDTITLECACHE="false" TITLECACHE="Abies alternative" MICROREFERENCE="[null]" NAME_ID="5001"/>
-  <CLASSIFICATION_AUD/>
   
   <LANGUAGESTRING ID="5000" UUID="0ac3b18a-9f48-49cf-8e2d-52ac46a11afa" TEXT="European Abies" LANGUAGE_ID="406"/>
   <LANGUAGESTRING ID="5001" UUID="82d9ae61-3409-433c-8925-836a8739547b" TEXT="Abies alternative" LANGUAGE_ID="406"/>
index b2759de39fcde460a728a173e2b74ddc3a1df320..284867813b64ec7694904f1c7f9988b5cbdc6e78 100644 (file)
@@ -3,7 +3,7 @@
   <parent>
     <groupId>eu.etaxonomy</groupId>
     <artifactId>cdmlib-parent</artifactId>
-    <version>5.2.0</version>
+    <version>5.3.0</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 
diff --git a/pom.xml b/pom.xml
index 4b8ea53f3fdba36f6315309427b6a27079438873..9530dfb2d986cfb58ff5115fc25de71d7da4de08 100644 (file)
--- a/pom.xml
+++ b/pom.xml
@@ -3,7 +3,7 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>eu.etaxonomy</groupId>
   <artifactId>cdmlib-parent</artifactId>
-  <version>5.2.0</version>
+  <version>5.3.0</version>
   <name>CDM Library</name>
   <description>The Java implementation of the Common Data Model (CDM), the data model for EDIT's internet platform for cybertaxonomy.</description>
   <url>http://cybertaxonomy.eu/cdmlib/</url>
         <groupId>io.swagger</groupId>
         <artifactId>swagger-annotations</artifactId>
         <!-- should match the springfox-swagger2 depends on -->
-        <version>1.5.6</version>
+        <version>1.5.10</version>
       </dependency>
 
       <!-- dependencies for swagger-springmvc, added explicitely -->
       <dependency>
         <groupId>io.springfox</groupId>
         <artifactId>springfox-swagger2</artifactId>
-        <version>2.4.0</version>
+        <version>2.6.1</version>
       </dependency>
       
       <!-- ******* DATABASES DRIVER ******* -->