<parent>
<groupId>eu.etaxonomy</groupId>
<artifactId>cdmlib-parent</artifactId>
- <version>5.2.0</version>
+ <version>5.3.0</version>
<relativePath>../pom.xml</relativePath>
</parent>
/**
* @author cmathew
* @since 19 Feb 2015
- *
*/
public class CacheLoader {
private static final Logger logger = Logger.getLogger(CacheLoader.class);
private final Cache cdmlibModelCache;
-
public CacheLoader(ICdmCacher cdmCacher) {
this.cdmCacher = cdmCacher;
this.cdmlibModelCache = CdmRemoteCacheManager.getInstance().getCdmModelGetMethodsCache();
-
}
} else if (obj instanceof Collection) {
return (T) load((Collection<T>)obj, recursive, update);
} else if(obj instanceof Pager) {
- load(((Pager)obj).getRecords(), recursive, update);
+ load(((Pager)obj).getRecords(), recursive, update);
return obj;
} else if(obj instanceof MergeResult) {
return (T) load((MergeResult<CdmBase>)obj, recursive, update);
return (T) loadRecursive((MergeResult)obj, alreadyVisitedEntities, update);
}
-
- logger.info("No caching yet for type " + obj.getClass().getName());
+ if (logger.isInfoEnabled()){logger.info("No caching yet for type " + obj.getClass().getName());}
return obj;
}
public <T extends Object> Map<T,T> load(Map<T,T> map, boolean recursive, boolean update){
-
if(isRecursiveEnabled && recursive) {
- logger.debug("---- starting recursive load for cdm entity map");
- List<Object> alreadyVisitedEntities = new ArrayList<Object>();
+ if (logger.isDebugEnabled()){logger.debug("---- starting recursive load for cdm entity map");}
+ List<Object> alreadyVisitedEntities = new ArrayList<>();
Map<T,T> cachedMap = load(map, alreadyVisitedEntities, update);
alreadyVisitedEntities.clear();
- logger.debug("---- ending recursive load for cdm entity map \n");
+ if (logger.isDebugEnabled()){logger.debug("---- ending recursive load for cdm entity map \n");}
return cachedMap;
} else {
return load(map, null, update);
private <T extends Object> Map<T,T> load(Map<T,T> map, List<Object> alreadyVisitedEntities, boolean update){
- //map = (Map<T,T>)deproxy(map);
if(map == null || map.isEmpty()) {
return map;
}
- int originalMapSize = map.size();
Object[] result = new Object[ map.size() * 2 ];
Iterator<Map.Entry<T,T>> iter = map.entrySet().iterator();
int i=0;
// to avoid ConcurrentModificationException
- alreadyVisitedEntities.add(map);
+ if (alreadyVisitedEntities != null){
+ alreadyVisitedEntities.add(map);
+ }
while ( iter.hasNext() ) {
Map.Entry<T,T> e = iter.next();
result[i++] = e.getKey();
Collection<T> loadedCollection;
if(isRecursiveEnabled && recursive) {
- logger.debug("---- starting recursive load for cdm entity collection");
- List<Object> alreadyVisitedEntities = new ArrayList<Object>();
+ if (logger.isDebugEnabled()){logger.debug("---- starting recursive load for cdm entity collection");}
+ List<Object> alreadyVisitedEntities = new ArrayList<>();
Collection<T> cachedCollection = load(collection, alreadyVisitedEntities, update);
alreadyVisitedEntities.clear();
- logger.debug("---- ending recursive load for cdm entity collection \n");
+ if (logger.isDebugEnabled()){logger.debug("---- ending recursive load for cdm entity collection \n");}
loadedCollection = cachedCollection;
} else {
loadedCollection = load(collection, null, update);
@SuppressWarnings("unchecked")
private <T extends Object> Collection<T> load(Collection<T> collection, List<Object> alreadyVisitedEntities, boolean update) {
-
-
if(collection == null || collection.isEmpty()) {
return collection;
}
Iterator<T> collectionItr = collection.iterator();
int count = 0;
// to avoid ConcurrentModificationException
- alreadyVisitedEntities.add(collection);
+ if (alreadyVisitedEntities != null){
+ alreadyVisitedEntities.add(collection);
+ }
while(collectionItr.hasNext()) {
Object obj = collectionItr.next();
if(alreadyVisitedEntities == null) {
public MergeResult<CdmBase> load(MergeResult<CdmBase> mergeResult, boolean recursive, boolean update) {
CdmBase cdmBase = load(mergeResult.getMergedEntity(), recursive, update);
load(mergeResult.getNewEntities(), recursive, update);
- return new MergeResult(cdmBase, mergeResult.getNewEntities());
+ return new MergeResult<>(cdmBase, mergeResult.getNewEntities());
}
public MergeResult<CdmBase> loadRecursive(MergeResult<CdmBase> mergeResult,List<Object> alreadyVisitedEntities, boolean update) {
CdmBase cdmBase = loadRecursive(mergeResult.getMergedEntity(), alreadyVisitedEntities, update);
loadRecursive(mergeResult.getNewEntities(), alreadyVisitedEntities, update);
- return new MergeResult(cdmBase, mergeResult.getNewEntities());
+ return new MergeResult<>(cdmBase, mergeResult.getNewEntities());
}
/**
}
}
- CdmBase loadedCdmBase;
+ T loadedCdmBase;
if(isRecursiveEnabled && recursive) {
logger.debug("---- starting recursive load for cdm entity " + cdmEntity.getClass().getName() + " with id " + cdmEntity.getId());
List<Object> alreadyVisitedEntities = new ArrayList<Object>();
- CdmBase cb = loadRecursive(cdmEntity, alreadyVisitedEntities, update);
+ T cb = loadRecursive(cdmEntity, alreadyVisitedEntities, update);
alreadyVisitedEntities.clear();
logger.debug("---- ending recursive load for cdm entity " + cdmEntity.getClass().getName() + " with id " + cdmEntity.getId() + "\n");
loadedCdmBase = cb;
} else {
loadedCdmBase = load(cdmEntity);
}
- return (T) loadedCdmBase;
+ return loadedCdmBase;
}
- protected CdmBase load(CdmBase cdmEntity) {
+ protected <T extends CdmBase> T load(T cdmEntity) {
logger.debug("loading object of type " + cdmEntity.getClass().getName() + " with id " + cdmEntity.getId());
cdmCacher.put((CdmBase)ProxyUtils.deproxy(cdmEntity));
return cdmCacher.getFromCache(cdmEntity);
* them to the value of the cdm entity being loaded
* @return
*/
- private CdmBase loadRecursive(CdmBase cdmEntity, List<Object> alreadyVisitedEntities, boolean update) {
+ private <T extends CdmBase> T loadRecursive(T cdmEntity, List<Object> alreadyVisitedEntities, boolean update) {
- CdmBase cachedCdmEntity = load(cdmEntity);
+ T cachedCdmEntity = load(cdmEntity);
// we want to recursive through the cdmEntity (and not the cachedCdmEntity)
// since there could be new or deleted objects in the cdmEntity sub-graph
// start by getting the fields from the cdm entity
- CdmBase deproxiedEntity = (CdmBase)ProxyUtils.deproxyOrNull(cdmEntity);
+ //TODO improve generics for deproxyOrNull, probably need to split the method
+ T deproxiedEntity = (T)ProxyUtils.deproxyOrNull(cdmEntity);
if(deproxiedEntity != null){
String className = deproxiedEntity.getClass().getName();
- CdmModelFieldPropertyFromClass cmgmfc = getFromCdmlibModelCache(className);
- if(cmgmfc != null) {
+ CdmModelFieldPropertyFromClass cmfpfc = getFromCdmlibModelCache(className);
+ if(cmfpfc != null) {
alreadyVisitedEntities.add(cdmEntity);
- List<String> fields = cmgmfc.getFields();
+ List<String> fields = cmfpfc.getFields();
for(String field : fields) {
// retrieve the actual object corresponding to the field.
// this object will be either a CdmBase or a Collection / Map
throw new CdmClientCacheException("CdmEntity with class " + cdmEntity.getClass().getName() + " is not found in the cdmlib model cache. " +
"The cache may be corrupted or not in sync with the latest model version" );
}
- } else {
+ } else { //deproxiedEntity == null
logger.debug("ignoring uninitlialized proxy " + cdmEntity.getClass() + "#" + cdmEntity.getId());
}
import eu.etaxonomy.cdm.model.common.CdmBase;
-public class CdmEntityCacheKey {
+public class CdmEntityCacheKey<T extends CdmBase> {
- private Class<? extends CdmBase> persistenceClass;
+ private Class<T> persistenceClass;
private int persistenceId;
-
- public CdmEntityCacheKey(CdmBase cdmBase) {
- this.persistenceClass = cdmBase.getClass();
+
+
+ public CdmEntityCacheKey(T cdmBase) {
+ this.persistenceClass = (Class<T>)cdmBase.getClass();
this.persistenceId = cdmBase.getId();
}
-
- public CdmEntityCacheKey(Class<? extends CdmBase> clazz, int id) {
+
+ /**
+ * @param clazz the entity class of the cached object
+ * @param id the persistence id of the cached object
+ */
+ public CdmEntityCacheKey(Class<T> clazz, int id) {
this.persistenceClass = clazz;
this.persistenceId = id;
}
-
-
- public Class<? extends CdmBase> getPersistenceClass() {
+
+
+ public Class<? extends T> getPersistenceClass() {
return persistenceClass;
}
-
+
public int getPersistenceId() {
return persistenceId;
}
if(obj == null || !(obj instanceof CdmEntityCacheKey)) {
return false;
}
-
+
if(this == obj) {
return true;
}
- CdmEntityCacheKey that = (CdmEntityCacheKey) obj;
- if(this.persistenceClass.equals(that.persistenceClass) && this.persistenceId == that.persistenceId) {
+ CdmEntityCacheKey<?> that = (CdmEntityCacheKey<?>) obj;
+ if(this.persistenceClass.equals(that.persistenceClass)
+ && this.persistenceId == that.persistenceId) {
return true;
}
-
+
return false;
}
-
+
@Override
public int hashCode() {
return (this.persistenceClass.getName() + String.valueOf(this.persistenceId)).hashCode();
}
-
+
@Override
public String toString() {
return this.persistenceClass.getName() + String.valueOf(this.persistenceId);
*/
package eu.etaxonomy.cdm.cache;
+import java.lang.management.ManagementFactory;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
+import javax.management.MBeanServer;
+
import org.apache.log4j.Logger;
import eu.etaxonomy.cdm.api.cache.CdmCacher;
import net.sf.ehcache.Status;
import net.sf.ehcache.config.CacheConfiguration;
import net.sf.ehcache.config.SizeOfPolicyConfiguration;
+import net.sf.ehcache.management.ManagementService;
import net.sf.ehcache.statistics.LiveCacheStatistics;
/**
+ * This cache handles transient (id>0) and volatile (id=0) CdmBase objects.
+ * Volatile objects need to be added via {@link #addNewEntity(CdmBase)}
+ * and their id is updated as soon as a transient object with the same
+ * uuid is added to the cacher.
*
* This cache guarantees that
* - all objects put will be ancestors of CdmBase
* @since 14 Oct 2014
*
*/
-
public class CdmTransientEntityCacher implements ICdmCacher {
private static final Logger logger = Logger.getLogger(CdmTransientEntityCacher.class);
-
- // removed since unused ########################
- // private final eu.etaxonomy.cdm.session.ICdmEntitySessionManager cdmEntitySessionManager;
-
- /**
- * permanent cache which is usually used to cache terms permanently
- * FIXME rename to permanent cache
- */
- private static CdmCacher cdmCacher;
-
+ //the key for this cacher within the CacheManager
private final String cacheId;
+ //the cache
private final Cache cache;
+ //permanent cache which is usually used to cache terms permanently
+ private static CdmCacher permanentCache;
+
private final CacheLoader cacheLoader;
- private final Map<UUID, CdmBase> newEntitiesMap = new HashMap<UUID, CdmBase>();
+ //map for volatile entities (id=0)
+ private final Map<UUID, CdmBase> newEntitiesMap = new HashMap<>();
+
+ private static volatile boolean managementBeansConfigured = false;
+
+// ********************* CONSTRUCTOR **********************************/
public CdmTransientEntityCacher(String cacheId) {
this.cacheId = cacheId;
cache = new Cache(getEntityCacheConfiguration(cacheId));
- CacheManager.create().removeCache(cache.getName());
- CacheManager.create().addCache(cache);
-
- // removed since unused ########################
- // this.cdmEntitySessionManager = cdmEntitySessionManager;
+ createCacheManager().removeCache(cache.getName());
+ createCacheManager().addCache(cache);
cacheLoader = new CacheLoader(this);
-
}
public CdmTransientEntityCacher(Object sessionOwner) {
return sessionOwner.getClass().getName() + String.valueOf(sessionOwner.hashCode());
}
+//****************************** METHODS *********************************/
+
/**
* Returns the default cache configuration.
*
sizeOfConfig.setMaxDepthExceededBehavior("abort");
return new CacheConfiguration(cacheId, 0)
- .eternal(true)
- .statistics(true)
- .sizeOfPolicy(sizeOfConfig)
- .overflowToOffHeap(false);
+ .eternal(true)
+ .statistics(true)
+ .sizeOfPolicy(sizeOfConfig)
+ .overflowToOffHeap(false);
}
- public static void setDefaultCacher(CdmCacher css) {
- cdmCacher = css;
+ public static void setPermanentCacher(CdmCacher permanentCacher) {
+ permanentCache = permanentCacher;
}
public LiveCacheStatistics getCacheStatistics() {
* @return
*/
private Cache getCache() {
- return CacheManager.create().getCache(cacheId);
+ return createCacheManager().getCache(cacheId);
+ }
+
+ /**
+ * @return
+ */
+ protected CacheManager createCacheManager() {
+
+ CacheManager cacheManager = CacheManager.create();
+
+ if(!managementBeansConfigured){
+ MBeanServer mBeanServer = ManagementFactory.getPlatformMBeanServer();
+ boolean registerCacheManager = false;
+ boolean registerCaches = true;
+ boolean registerCacheConfigurations = false;
+ boolean registerCacheStatistics = true;
+ ManagementService.registerMBeans(cacheManager, mBeanServer, registerCacheManager, registerCaches, registerCacheConfigurations, registerCacheStatistics);
+ managementBeansConfigured = true;
+ }
+
+ return cacheManager;
}
public <T extends Object> T load(T obj, boolean update) {
}
/**
- * Puts the passed <code>cdmEntity</code> into the cache as long it does not yet exist in the caches.
+ * Puts the passed <code>cdmEntity</code> into the cache as long it does
+ * not yet exist in the caches.
* <p>
* The adjacent <b>ENTITY GRAPH WILL NOT BE LOADED RECURSIVELY</b>
*/
@Override
public void put(CdmBase cdmEntity) {
- CdmBase cachedCdmEntity = cdmCacher.load(cdmEntity);
+ CdmBase cachedCdmEntity = permanentCache.load(cdmEntity);
if(cachedCdmEntity != null) {
logger.info("Cdm Entity with id : " + cdmEntity.getId() + " already exists in permanent cache. Ignoring put.");
return;
}
- CdmEntityCacheKey id = new CdmEntityCacheKey(cdmEntity);
+ CdmEntityCacheKey<?> key = new CdmEntityCacheKey<>(cdmEntity);
- cachedCdmEntity = getFromCache(id);
+ cachedCdmEntity = getFromCache(key);
if(cachedCdmEntity == null) {
CdmBase cdmEntityToCache = cdmEntity;
CdmBase newEntity = newEntitiesMap.get(cdmEntity.getUuid());
newEntity.setId(cdmEntity.getId());
cdmEntityToCache = newEntity;
}
- getCache().put(new Element(id, cdmEntityToCache));
+ getCache().put(new Element(key, cdmEntityToCache));
cdmEntityToCache.initListener();
newEntitiesMap.remove(cdmEntity.getUuid());
- logger.debug(" - object of type " + cdmEntityToCache.getClass().getName() + " with id " + cdmEntityToCache.getId() + " put in cache");
+ if (logger.isDebugEnabled()){logger.debug(" - object of type " + cdmEntityToCache.getClass().getName() + " with id " + cdmEntityToCache.getId() + " put in cache");}
return;
}
logger.debug(" - object of type " + cdmEntity.getClass().getName() + " with id " + cdmEntity.getId() + " already exists");
}
- private Element getCacheElement(CdmEntityCacheKey key) {
+ private Element getCacheElement(CdmEntityCacheKey<?> key) {
return getCache().get(key);
}
- public CdmBase getFromCache(CdmEntityCacheKey id) {
+ public <T extends CdmBase> T getFromCache(CdmEntityCacheKey<T> id) {
Element e = getCacheElement(id);
if (e == null) {
return null;
} else {
- return (CdmBase) e.getObjectValue();
+ @SuppressWarnings("unchecked")
+ T result = (T) e.getObjectValue();
+ return result;
}
}
- public CdmBase getFromCache(Class<? extends CdmBase> clazz, int id) {
- CdmEntityCacheKey cacheId = generateKey(clazz,id);
+ public <T extends CdmBase> T getFromCache(Class<T> clazz, int id) {
+ CdmEntityCacheKey<T> cacheId = generateKey(clazz, id);
return getFromCache(cacheId);
}
@Override
public <T extends CdmBase> T getFromCache(T cdmBase) {
- CdmEntityCacheKey cacheId = generateKey((CdmBase)ProxyUtils.deproxy(cdmBase));
+ CdmEntityCacheKey<T> cacheId = generateKey((T)ProxyUtils.deproxy(cdmBase));
// first try this cache
- CdmBase cachedCdmEntity = getFromCache(cacheId);
+ T cachedCdmEntity = getFromCache(cacheId);
if(cachedCdmEntity == null) {
// ... then try the permanent cache
- cachedCdmEntity = cdmCacher.getFromCache(cdmBase.getUuid());
+ //TODO also use generics and clazz parameter for getFromCache(uuid)
+ cachedCdmEntity = (T)permanentCache.getFromCache(cdmBase.getUuid());
}
- return (T) cachedCdmEntity;
+ return cachedCdmEntity;
}
public CdmBase getFromCache(CdmBase cdmBase, Class<? extends CdmBase> clazz) {
}
public List<CdmBase> getAllEntities() {
- List<CdmBase> entities = new ArrayList<CdmBase>();
+ List<CdmBase> entities = new ArrayList<>();
Map<String, CdmBase> elementsMap = getCache().getAllWithLoader(getCache().getKeys(), null);
for (Map.Entry<String, CdmBase> entry : elementsMap.entrySet()) {
entities.add(entry.getValue());
return entities;
}
- public boolean exists(CdmEntityCacheKey key) {
+ public boolean exists(CdmEntityCacheKey<?> key) {
return (getCacheElement(key) != null);
}
- public boolean existsAndIsNotNull(CdmEntityCacheKey id) {
+ public boolean existsAndIsNotNull(CdmEntityCacheKey<?> id) {
return getFromCache(id) != null;
}
}
public void dispose() {
- CacheManager.create().removeCache(cache.getName());
+ createCacheManager().removeCache(cache.getName());
cache.dispose();
newEntitiesMap.clear();
-
}
- public static CdmEntityCacheKey generateKey(Class<? extends CdmBase> clazz, int id) {
- return new CdmEntityCacheKey(clazz, id);
+ public static <T extends CdmBase> CdmEntityCacheKey<T> generateKey(Class<T> clazz, int id) {
+ return new CdmEntityCacheKey<T>(clazz, id);
}
- public static CdmEntityCacheKey generateKey(CdmBase cdmBase) {
- Class<? extends CdmBase> entityClass = cdmBase.getClass();
- int id = cdmBase.getId();
- return new CdmEntityCacheKey(entityClass, id);
+ public static <T extends CdmBase> CdmEntityCacheKey<T> generateKey(T cdmBase) {
+ Class<T> entityClass = (Class<T>)cdmBase.getClass();
+ return new CdmEntityCacheKey<T>(entityClass, cdmBase.getId());
}
@Override
<parent>
<groupId>eu.etaxonomy</groupId>
<artifactId>cdmlib-parent</artifactId>
- <version>5.2.0</version>
+ <version>5.3.0</version>
<relativePath>../pom.xml</relativePath>
</parent>
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
+import java.util.List;
import java.util.Locale;
+import java.util.Map;
import org.apache.log4j.Logger;
import org.apache.poi.hssf.usermodel.HSSFCell;
private static final Logger logger = Logger.getLogger(ExcelUtils.class);
/** Reads all rows of an Excel worksheet */
- public static ArrayList<HashMap<String, String>> parseXLS(URI uri) throws FileNotFoundException {
+ public static List<Map<String, String>> parseXLS(URI uri) throws FileNotFoundException {
return parseXLS(uri, null);
}
/** Reads all rows of an Excel worksheet */
- public static ArrayList<HashMap<String, String>> parseXLS(URI uri, String worksheetName) throws FileNotFoundException {
- InputStream stream;
+ public static List<Map<String, String>> parseXLS(URI uri, String worksheetName) throws FileNotFoundException {
try {
- stream = UriUtils.getInputStream(uri);
+ InputStream stream = UriUtils.getInputStream(uri);
return parseXLS(stream, worksheetName);
} catch(FileNotFoundException fne) {
throw new FileNotFoundException(uri.toString());
} catch(Exception ioe) {
- logger.error("Error reading the Excel file." + uri.toString());
+ String message = "Error reading the Excel file." + uri.toString();
+ logger.error(message);
ioe.printStackTrace();
+ throw new RuntimeException(message);
}
- return null;
}
- /** Reads all rows of an Excel worksheet */
- public static ArrayList<HashMap<String, String>> parseXLS(InputStream stream, String worksheetName) throws FileNotFoundException {
+ /** Reads all rows of an Excel worksheet */
+ public static List<Map<String, String>> parseXLS(InputStream stream, String worksheetName) {
- ArrayList<HashMap<String, String>> recordList = new ArrayList<HashMap<String, String>>();
+ List<Map<String, String>> recordList = new ArrayList<>();
try {
// POIFSFileSystem fs = new POIFSFileSystem(UriUtils.getInputStream(uri));
// HSSFWorkbook wb = new HSSFWorkbook(fs);
-
Workbook wb = WorkbookFactory.create(stream);
-
Sheet sheet;
if (worksheetName == null){
sheet = wb.getSheetAt(0);
}
}
-
//first row
- ArrayList<String> columns = new ArrayList<String>();
+ List<String> columns = new ArrayList<>();
row = sheet.getRow(0);
for (int c = 0; c < cols; c++){
cell = row.getCell(c);
if(cell != null) {
- columns.add(cell.toString());
- if(logger.isDebugEnabled()) { logger.debug("Cell #" + c + ": " + cell.toString()); }
+ String str = cell.toString();
+ str = (str == null)? null : str.trim();
+ //TODO better make case sensitive, but need to adapt all existing imports for this
+ columns.add(str);
+ if(logger.isDebugEnabled()) { logger.debug("Cell #" + c + ": " + str); }
} else {
if(logger.isDebugEnabled()) { logger.debug("Cell #" + c + " is null"); }
}
//value rows
for(int r = 1; r < rows; r++) {
row = sheet.getRow(r);
- HashMap<String, String> headers = new HashMap<String, String>();
+ Map<String, String> headers = new HashMap<>();
boolean notEmpty = checkIsEmptyRow(row);
if(notEmpty) {
for(int c = 0; c < cols; c++) {
/**\r
* Copyright (C) 2009 EDIT\r
-* European Distributed Institute of Taxonomy \r
+* European Distributed Institute of Taxonomy\r
* http://www.e-taxonomy.eu\r
-* \r
+*\r
* The contents of this file are subject to the Mozilla Public License Version 1.1\r
* See LICENSE.TXT at the top of this package for the full license terms.\r
*/\r
package eu.etaxonomy.cdm.common;\r
\r
+import java.io.File;\r
+import java.io.FileOutputStream;\r
import java.io.IOException;\r
import java.io.InputStream;\r
import java.io.InputStreamReader;\r
import java.io.StringBufferInputStream;\r
+import java.net.HttpURLConnection;\r
+import java.net.URL;\r
\r
import org.apache.log4j.Logger;\r
\r
*\r
*/\r
public class StreamUtils {\r
- \r
+\r
public static final Logger logger = Logger.getLogger(StreamUtils.class);\r
- \r
+ private static final int BUFFER_SIZE = 4096;\r
+\r
/**\r
- * Replaces each substring of this stream that matches the literal search sequence with the specified literal replace sequence. \r
+ * Replaces each substring of this stream that matches the literal search sequence with the specified literal replace sequence.\r
* The replacement proceeds from the beginning of the stream to the end, for example, replacing "aa" with "b" in the string "aaa" will result in "ba" rather than "ab".\r
- * \r
- * @param stream \r
+ *\r
+ * @param stream\r
* @param search The sequence of char values to be replaced\r
* @param replace The replacement sequence of char values\r
* @return\r
* @throws IOException\r
- * \r
+ *\r
*/\r
public static InputStream streamReplace(InputStream stream, String search, String replace) throws IOException {\r
InputStreamReader reader = new InputStreamReader(stream);\r
StringBuilder strBuilder = new StringBuilder();\r
- \r
+\r
char[] cbuf = new char[1024];\r
int charsRead = -1;\r
while ((charsRead = reader.read(cbuf)) > -1){\r
strBuilder.append(cbuf, 0, charsRead);\r
}\r
- String replacedContent = strBuilder.toString().replace(search, replace); \r
+ String replacedContent = strBuilder.toString().replace(search, replace);\r
StringBufferInputStream replacedStream = new StringBufferInputStream(replacedContent); //TODO replace with StringReader\r
logger.debug(replacedContent);\r
return replacedStream;\r
}\r
- \r
+\r
public static InputStream streamReplaceAll(InputStream stream, String regex, String replace) throws IOException {\r
InputStreamReader reader = new InputStreamReader(stream);\r
StringBuilder strBuilder = new StringBuilder();\r
- \r
+\r
char[] cbuf = new char[1024];\r
int charsRead = -1;\r
while ((charsRead = reader.read(cbuf)) > -1){\r
strBuilder.append(cbuf, 0, charsRead);\r
}\r
- String replacedContent = strBuilder.toString().replaceAll(regex, replace); \r
+ String replacedContent = strBuilder.toString().replaceAll(regex, replace);\r
StringBufferInputStream replacedStream = new StringBufferInputStream(replacedContent); //TODO replace with StringReader\r
logger.debug(replacedContent);\r
return replacedStream;\r
}\r
- \r
+\r
public static String readToString(InputStream stream) throws IOException {\r
InputStreamReader reader = new InputStreamReader(stream);\r
StringBuilder strBuilder = new StringBuilder();\r
- \r
+\r
char[] cbuf = new char[1024];\r
int charsRead = -1;\r
while ((charsRead = reader.read(cbuf)) > -1){\r
return strBuilder.toString();\r
}\r
\r
+ public static void downloadFile(URL url, String saveDir)\r
+ throws IOException {\r
+\r
+ HttpURLConnection httpConn = (HttpURLConnection) url.openConnection();\r
+ int responseCode = httpConn.getResponseCode();\r
+\r
+ // always check HTTP response code first\r
+ if (responseCode == HttpURLConnection.HTTP_OK) {\r
+ String fileName = "";\r
+ String disposition = httpConn.getHeaderField("Content-Disposition");\r
+\r
+ if (disposition != null) {\r
+ // extracts file name from header field\r
+ int index = disposition.indexOf("filename=");\r
+ if (index > 0) {\r
+ fileName = disposition.substring(index + 10,\r
+ disposition.length() - 1);\r
+ }\r
+ } else {\r
+ // extracts file name from URL\r
+ fileName = url.getFile().toString().substring(url.getFile().lastIndexOf("/") + 1,\r
+ url.getFile().length());\r
+ }\r
+\r
+ // opens input stream from the HTTP connection\r
+ InputStream inputStream = httpConn.getInputStream();\r
+ String saveFilePath = saveDir + File.separator + fileName;\r
+\r
+ // opens an output stream to save into file\r
+ FileOutputStream outputStream = new FileOutputStream(saveFilePath);\r
+\r
+ int bytesRead = -1;\r
+ byte[] buffer = new byte[BUFFER_SIZE];\r
+ while ((bytesRead = inputStream.read(buffer)) != -1) {\r
+ outputStream.write(buffer, 0, bytesRead);\r
+ }\r
+\r
+ outputStream.close();\r
+ inputStream.close();\r
+\r
+\r
+ } else {\r
+ logger.error("No file to download. Server replied HTTP code: " + responseCode);\r
+ }\r
+ httpConn.disconnect();\r
+ }\r
+\r
+\r
}\r
ACUTE_ACCENT("\u00B4"), //Acute Accent, looks a bit similar to th single quotation mark
BLACK_CIRCLE("\u25CF"), //Black circle, symbol for endemic
DEGREE_SIGN("\u00B0"), //°
+ NARROW_NO_BREAK("\u202F")
;
private String value;
<parent>
<groupId>eu.etaxonomy</groupId>
<artifactId>cdmlib-parent</artifactId>
- <version>5.2.0</version>
+ <version>5.3.0</version>
<relativePath>../pom.xml</relativePath>
</parent>
<parent>
<groupId>eu.etaxonomy</groupId>
<artifactId>cdmlib-parent</artifactId>
- <version>5.2.0</version>
+ <version>5.3.0</version>
<relativePath>../pom.xml</relativePath>
</parent>
*/
package eu.etaxonomy.cdm.ext.geo;
-import java.io.File;
import java.io.IOException;
import java.io.Reader;
import java.util.HashMap;
import java.util.Set;
import org.apache.commons.lang.ArrayUtils;
-import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import au.com.bytecode.opencsv.CSVReader;
*/
public Map<NamedArea, String> readCsv(Reader reader, List<String> idSearchFields, String wmsLayerName) throws IOException {
- logger.setLevel(Level.DEBUG);
+ //logger.setLevel(Level.DEBUG);
Map<NamedArea, String> resultMap = new HashMap<>(areas.size());
<parent>
<groupId>eu.etaxonomy</groupId>
<artifactId>cdmlib-parent</artifactId>
- <version>5.2.0</version>
+ <version>5.3.0</version>
<relativePath>../pom.xml</relativePath>
</parent>
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
+import eu.etaxonomy.cdm.api.service.name.TypeDesignationSetManager;
import eu.etaxonomy.cdm.common.CdmUtils;
import eu.etaxonomy.cdm.common.monitor.IProgressMonitor;
import eu.etaxonomy.cdm.filter.TaxonNodeFilter;
import eu.etaxonomy.cdm.model.media.MediaRepresentationPart;
import eu.etaxonomy.cdm.model.name.HomotypicalGroup;
import eu.etaxonomy.cdm.model.name.HomotypicalGroupNameComparator;
-import eu.etaxonomy.cdm.model.name.NameTypeDesignation;
import eu.etaxonomy.cdm.model.name.NomenclaturalStatus;
import eu.etaxonomy.cdm.model.name.Rank;
import eu.etaxonomy.cdm.model.name.SpecimenTypeDesignation;
import eu.etaxonomy.cdm.model.taxon.TaxonNode;
import eu.etaxonomy.cdm.model.taxon.TaxonRelationship;
import eu.etaxonomy.cdm.model.taxon.TaxonRelationshipType;
+import eu.etaxonomy.cdm.strategy.cache.TagEnum;
+import eu.etaxonomy.cdm.strategy.cache.TaggedText;
+import eu.etaxonomy.cdm.strategy.cache.reference.DefaultReferenceCacheStrategy;
import eu.etaxonomy.cdm.strategy.exceptions.UnknownCdmTypeException;
/**
HomotypicalGroup group =name.getHomotypicalGroup();
if (state.getHomotypicalGroupFromStore(group.getId()) == null){
- handleHomotypicalGroup(state, group);
+ handleHomotypicalGroup(state, HibernateProxyHelper.deproxy(group, HomotypicalGroup.class));
}
csvLine[table.getIndex(CdmLightExportTable.HOMOTYPIC_GROUP_FK)] = getId(state, group);
List<TaxonName> typifiedNames = new ArrayList<>();
*/
private void handleHomotypicalGroup(CdmLightExportState state, HomotypicalGroup group) {
try {
- state.addHomotypicalGroupToStore(group);
+ state.addHomotypicalGroupToStore(group);
CdmLightExportTable table = CdmLightExportTable.HOMOTYPIC_GROUP;
String[] csvLine = new String[table.getSize()];
}else{
csvLine[table.getIndex(CdmLightExportTable.HOMOTYPIC_GROUP_STRING)] = "";
}
- Set<TypeDesignationBase> typeDesigantions = group.getTypeDesignations();
+ Set<TypeDesignationBase> typeDesigantionSet = group.getTypeDesignations();
List<TypeDesignationBase> designationList = new ArrayList<>();
- designationList.addAll(typeDesigantions);
+ designationList.addAll(typeDesigantionSet);
Collections.sort(designationList, new TypeComparator());
StringBuffer typeDesignationString = new StringBuffer();
- for (TypeDesignationBase typeDesignation: typeDesigantions){
+ List<TaggedText> list = new ArrayList<TaggedText>();
+ if (!designationList.isEmpty()){
+ TypeDesignationSetManager manager = new TypeDesignationSetManager(group);
+
+ list.addAll( manager.toTaggedTextWithCitation());
+ System.err.println(list.toString());
+ }
+ StringBuffer homotypicalGroupTypeDesignationString = new StringBuffer();
+
+ for (TaggedText text:list){
+ if (text != null && text.getText() != null && (text.getText().equals("Type:") || text.getText().equals("NameType:"))){
+ //do nothing
+ } else if (text.getType().equals(TagEnum.reference)){
+ homotypicalGroupTypeDesignationString.append(text.getText());
+ }else if (text.getType().equals(TagEnum.typeDesignation)){
+ homotypicalGroupTypeDesignationString.append(text.getText().replace(").", "").replace("(", "").replace(")", "") );
+ } else{
+ homotypicalGroupTypeDesignationString.append(text.getText());
+ }
+ }
+
+
+ String typeDesignations= homotypicalGroupTypeDesignationString.toString();
+ typeDesignations = typeDesignations.trim();
+
+ typeDesignations += ".";
+ typeDesignations = typeDesignations.replace("..", ".");
+ typeDesignations = typeDesignations.replace(". .", ".");
+ if (typeDesignations.equals(".")){
+ typeDesignations = null;
+ }
+ System.err.println(typeDesignations);
+ /* for (TypeDesignationBase typeDesignation: designationList){
+ //[Vorschlag Soll:]
+ // Sumatra Utara, Kab. Karo, around Sidikalang areas, 1000─1500 m, Dec 11, 2003, Nepenthes Team (Hernawati, P. Akhriadi & I. Petra), NP 354 (‘ANDA’–Holo, BO–Iso) [fide Akhriadi & al. 2004]
if (typeDesignation != null && typeDesignation.getTypeStatus() != null){
- typeDesignationString.append(typeDesignation.getTypeStatus().getTitleCache() + ": ");
+ typeDesignationString.append(typeDesignation.getTypeStatus().getTitleCache() + ":");
}
if (typeDesignation instanceof SpecimenTypeDesignation){
if (((SpecimenTypeDesignation)typeDesignation).getTypeSpecimen() != null){
- typeDesignationString.append(((SpecimenTypeDesignation)typeDesignation).getTypeSpecimen().getTitleCache());
+ typeDesignationString.append(" "+((SpecimenTypeDesignation)typeDesignation).getTypeSpecimen().getTitleCache());
+ if (typeDesignationString.lastIndexOf(".") == typeDesignationString.length()){
+ typeDesignationString.deleteCharAt(typeDesignationString.lastIndexOf("."));
+ }
handleSpecimen(state, ((SpecimenTypeDesignation)typeDesignation).getTypeSpecimen());
}
}else{
}
}
if(typeDesignation.getCitation() != null ){
- typeDesignationString.append(", "+typeDesignation.getCitation().getTitleCache());
+ typeDesignationString.append(" [fide " + ((DefaultReferenceCacheStrategy)typeDesignation.getCitation().getCacheStrategy()).createShortCitation(typeDesignation.getCitation()) +"]");
}
//TODO...
/*
Aufbau der Typusinformationen:
Land: Lokalität mit Höhe und Koordinaten; Datum; Sammler Nummer (Herbar/Barcode, Typusart; Herbar/Barcode, Typusart …)
- */
- }
- String typeDesignations = typeDesignationString.toString();
+
+ }*/
+ // typeDesignations = typeDesignationString.toString();
if (typeDesignations != null){
+ if (!typeDesignations.endsWith(".") ){
+ typeDesignations =typeDesignations + ".";
+ }
csvLine[table.getIndex(CdmLightExportTable.TYPE_STRING)] = typeDesignations;
+
}else{
csvLine[table.getIndex(CdmLightExportTable.TYPE_STRING)] = "";
}
state.getProcessor().put(table, String.valueOf(group.getId()), csvLine);
} catch (Exception e) {
state.getResult().addException(e, "An unexpected error occurred when handling homotypic group " +
cdmBaseStr(group) + ": " + e.getMessage());
}
String[] csvLine = new String[table.getSize()];
csvLine[table.getIndex(CdmLightExportTable.REFERENCE_ID)] = getId(state, reference);
//TODO short citations correctly
- String shortCitation = createShortCitation(reference); //Should be Author(year) like in Taxon.sec
+ String shortCitation = ((DefaultReferenceCacheStrategy)reference.getCacheStrategy()).createShortCitation(reference); //Should be Author(year) like in Taxon.sec
csvLine[table.getIndex(CdmLightExportTable.BIBLIO_SHORT_CITATION)] = shortCitation;
//TODO get preferred title
csvLine[table.getIndex(CdmLightExportTable.REF_TITLE)] = reference.getTitle();
}
- /**
- * @param reference
- * @return
- */
- private String createShortCitation(Reference reference) {
- TeamOrPersonBase<?> authorship = reference.getAuthorship();
- String shortCitation = "";
- if (authorship == null) {
- return null;
- }
- authorship = HibernateProxyHelper.deproxy(authorship);
- if (authorship instanceof Person){
- shortCitation = ((Person)authorship).getFamilyName();
- if (StringUtils.isBlank(shortCitation) ){
- shortCitation = ((Person)authorship).getTitleCache();
- }
- }
- else if (authorship instanceof Team){
- Team authorTeam = HibernateProxyHelper.deproxy(authorship, Team.class);
- int index = 0;
-
- for (Person teamMember : authorTeam.getTeamMembers()){
- index++;
- if (index == 3){
- shortCitation += " & al.";
- break;
- }
- String concat = concatString(authorTeam, authorTeam.getTeamMembers(), index);
- if (teamMember.getFamilyName() != null){
- shortCitation += concat + teamMember.getFamilyName();
- }else{
- shortCitation += concat + teamMember.getTitleCache();
- }
-
- }
- if (StringUtils.isBlank(shortCitation)){
- shortCitation = authorTeam.getTitleCache();
- }
-
- }
- if (!StringUtils.isBlank(reference.getDatePublished().getFreeText())){
- shortCitation = shortCitation + " (" + reference.getDatePublished().getFreeText() + ")";
- }else if (!StringUtils.isBlank(reference.getYear()) ){
- shortCitation = shortCitation + " (" + reference.getYear() + ")";
- }
- return shortCitation;
- }
/**
* @param reference
this.exportType = exportType;
}
+    /**
+     * Appends a one-line summary of the export outcome to the report.
+     *
+     * @param report the report buffer to append the summary to
+     */
+    @Override
+    protected void addShortDescription(StringBuffer report) {
+        if (this.isSuccess()){
+            report.append("\n" + "Export was successful.");
+        } else {
+            report.append("\n" + "Export had some problems.");
+        }
+    }
+
+
+
+
}
import eu.etaxonomy.cdm.io.common.TaxonNodeOutStreamPartitioner;
import eu.etaxonomy.cdm.io.common.XmlExportState;
import eu.etaxonomy.cdm.model.common.CdmBase;
-import eu.etaxonomy.cdm.model.common.DefinedTermBase;
import eu.etaxonomy.cdm.model.common.Language;
import eu.etaxonomy.cdm.model.description.DescriptionBase;
import eu.etaxonomy.cdm.model.description.DescriptionElementBase;
if (state.getNotesFeature() != null){
return state.getNotesFeature();
} else{
- Pager<DefinedTermBase> notesFeature = getTermService().findByTitleWithRestrictions(Feature.class, "Notes" ,MatchMode.EXACT, null, null, null, null, null);
+ Pager<Feature> notesFeature = getTermService().findByTitleWithRestrictions(Feature.class, "Notes" ,MatchMode.EXACT, null, null, null, null, null);
if (notesFeature.getRecords().size() == 0){
return null;
}else{
- DefinedTermBase<?> feature= notesFeature.getRecords().iterator().next();
- if (feature instanceof Feature){
- state.setNotesFeature((Feature)feature);
- return (Feature) feature;
- } else{
- return null;
- }
+ Feature feature= notesFeature.getRecords().iterator().next();
+ state.setNotesFeature(feature);
+ return feature;
}
}
* {@inheritDoc}
*/
@Override
- protected void analyzeRecord(HashMap<String, String> record, ExcelDistributionUpdateState state) {
+ protected void analyzeRecord(Map<String, String> record, ExcelDistributionUpdateState state) {
// nothing to do
}
*/
@Override
protected void firstPass(ExcelDistributionUpdateState state) {
- HashMap<String, String> record = state.getOriginalRecord();
+ Map<String, String> record = state.getOriginalRecord();
String line = state.getCurrentLine() + ": ";
String taxonUuid = getValue(record, "taxon_uuid");
String taxonName = getValue(record, "Taxonname");
* @param record
* @param line
*/
- private void handleAreasForTaxon(ExcelDistributionUpdateState state, Taxon taxon, HashMap<String, String> record,
+ private void handleAreasForTaxon(ExcelDistributionUpdateState state, Taxon taxon, Map<String, String> record,
String line) {
ImportResult result = state.getResult();
Map<NamedArea, Set<Distribution>> existingDistributions = getExistingDistributions(state, taxon, line);
}
private Map<NamedArea, Distribution> getNewDistributions(ExcelDistributionUpdateState state,
- HashMap<String, String> record, String line) {
+ Map<String, String> record, String line) {
Map<NamedArea, Distribution> result = new HashMap<>();
import java.io.ByteArrayInputStream;
import java.io.FileNotFoundException;
import java.net.URI;
-import java.util.ArrayList;
-import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import java.util.UUID;
protected static final String SCIENTIFIC_NAME_COLUMN = "ScientificName";
- private ArrayList<HashMap<String, String>> recordList = null;
+ private List<Map<String, String>> recordList = null;
private ExcelImportConfiguratorBase configurator = null;
source = state.getConfig().getSource();
}
-
-
- String sheetName = getWorksheetName();
-
+ String sheetName = getWorksheetName(state.getConfig());
if (data != null){
try {
ByteArrayInputStream stream = new ByteArrayInputStream(data);
recordList = ExcelUtils.parseXLS(stream, sheetName);
- } catch (FileNotFoundException e) {
+ } catch (Exception e) {
throw new RuntimeException(e);
}
}else{
private void handleRecordList(STATE state, URI source) {
Integer startingLine = 2;
if (recordList != null) {
- HashMap<String,String> record = null;
+ Map<String,String> record = null;
TransactionStatus txStatus = startTransaction();
}
/**
- * To define a worksheet name override this method. Otherwise the first worksheet is taken.
+ * To define a worksheet name other than the one defined in the configurator
+ * override this method with a non <code>null</code> return value.
+ * If <code>null</code> is returned the first worksheet is taken.
+
* @return worksheet name. <code>null</null> if no worksheet is defined.
*/
- protected String getWorksheetName() {
- return null;
+ protected String getWorksheetName(CONFIG config) {
+ return config.getWorksheetName();
}
@Override
* @param record
* @return
*/
- protected abstract void analyzeRecord(HashMap<String,String> record, STATE state);
+ protected abstract void analyzeRecord(Map<String,String> record, STATE state);
protected abstract void firstPass(STATE state);
protected abstract void secondPass(STATE state);
String colNameCache, String colNameTitleCache, String colTaxonTitleCache,
Class<T> clazz, String line) {
- HashMap<String, String> record = state.getOriginalRecord();
+ Map<String, String> record = state.getOriginalRecord();
String strUuidTaxon = record.get(colTaxonUuid);
if (strUuidTaxon != null){
UUID uuidTaxon;
return CdmBase.deproxy(result, clazz);
}else{
- String message = "No taxon identifier found";
+ String message = "No taxon identifier column found";
state.getResult().addWarning(message, null, line);
return null;
}
* @see #getTaxonByCdmId(ExcelImportState, String, String, String, String, Class, String)
*/
protected void verifyName(STATE state, String colNameCache, String colNameTitleCache, String colTaxonTitleCache,
- String line, HashMap<String, String> record, TaxonBase<?> result) {
+ String line, Map<String, String> record, TaxonBase<?> result) {
//nameCache
String strExpectedNameCache = record.get(colNameCache);
String nameCache = result.getName() == null ? null : result.getName().getNameCache();
- if (isNotBlank(strExpectedNameCache) && (!strExpectedNameCache.equals(nameCache))){
+ if (isNotBlank(strExpectedNameCache) && (!strExpectedNameCache.trim().equals(nameCache))){
String message = "Name cache (%s) does not match expected name (%s)";
message = String.format(message, nameCache==null? "null":nameCache, strExpectedNameCache);
state.getResult().addWarning(message, null, line);
//name title
String strExpectedNameTitleCache = record.get(colNameTitleCache);
String nameTitleCache = result.getName() == null ? null : result.getName().getTitleCache();
- if (isNotBlank(strExpectedNameTitleCache) && (!strExpectedNameTitleCache.equals(nameTitleCache))){
+ if (isNotBlank(strExpectedNameTitleCache) && (!strExpectedNameTitleCache.trim().equals(nameTitleCache))){
String message = "Name title cache (%s) does not match expected name (%s)";
message = String.format(message, nameTitleCache==null? "null":nameTitleCache, strExpectedNameTitleCache);
state.getResult().addWarning(message, null, line);
//taxon title cache
String strExpectedTaxonTitleCache = record.get(colTaxonTitleCache);
String taxonTitleCache = result.getTitleCache();
- if (isNotBlank(strExpectedTaxonTitleCache) && (!strExpectedTaxonTitleCache.equals(taxonTitleCache))){
+ if (isNotBlank(strExpectedTaxonTitleCache) && (!strExpectedTaxonTitleCache.trim().equals(taxonTitleCache))){
String message = "Name cache (%s) does not match expected name (%s)";
message = String.format(message, taxonTitleCache==null? "null":taxonTitleCache, strExpectedTaxonTitleCache);
state.getResult().addWarning(message, null, line);
private byte[] stream;\r
private boolean deduplicateReferences = false;\r
private boolean deduplicateAuthors = false;\r
+\r
+ private String worksheetName = null;\r
+\r
/**\r
* @param url\r
* @param destination\r
}\r
public byte[] getStream(){\r
return stream;\r
- }\r
-\r
-\r
- public void setStream(byte[] stream) {\r
- this.stream = stream;\r
- }\r
-\r
- /**\r
- * @return the deduplicateReferences\r
- */\r
- public boolean isDeduplicateReferences() {\r
- return deduplicateReferences;\r
- }\r
-\r
- /**\r
- * @param deduplicateReferences the deduplicateReferences to set\r
- */\r
- public void setDeduplicateReferences(boolean deduplicateReferences) {\r
- this.deduplicateReferences = deduplicateReferences;\r
- }\r
-\r
- public boolean isDeduplicateAuthors() {\r
- return deduplicateAuthors;\r
- }\r
-\r
- public void setDeduplicateAuthors(boolean deduplicateAuthors) {\r
- this.deduplicateAuthors = deduplicateAuthors;\r
- }\r
+ }\r
+ public void setStream(byte[] stream) {\r
+ this.stream = stream;\r
+ }\r
+\r
+ public boolean isDeduplicateReferences() {\r
+ return deduplicateReferences;\r
+ }\r
+ public void setDeduplicateReferences(boolean deduplicateReferences) {\r
+ this.deduplicateReferences = deduplicateReferences;\r
+ }\r
+\r
+ public boolean isDeduplicateAuthors() {\r
+ return deduplicateAuthors;\r
+ }\r
+ public void setDeduplicateAuthors(boolean deduplicateAuthors) {\r
+ this.deduplicateAuthors = deduplicateAuthors;\r
+ }\r
+\r
+ public String getWorksheetName() {\r
+ return worksheetName;\r
+ }\r
+ public void setWorksheetName(String worksheetName) {\r
+ this.worksheetName = worksheetName;\r
+ }\r
}\r
package eu.etaxonomy.cdm.io.excel.common;
-import java.util.HashMap;
+import java.util.Map;
import org.apache.log4j.Logger;
private Integer currentLine;
private ROW currentRow;
- private HashMap<String, String> originalRecord;
+ private Map<String, String> originalRecord;
private Reference sourceReference;
-
public ExcelImportState(CONFIG config) {
super(config);
}
-
public Integer getCurrentLine() {
return currentLine;
}
this.currentRow = currentRow;
}
- public HashMap<String,String> getOriginalRecord(){
+ public Map<String,String> getOriginalRecord(){
return this.originalRecord;
}
- public void setOriginalRecord(HashMap<String,String> originalRecord){
+ public void setOriginalRecord(Map<String,String> originalRecord){
this.originalRecord = originalRecord;
}
\r
package eu.etaxonomy.cdm.io.excel.common;\r
\r
-import java.util.HashMap;\r
import java.util.List;\r
+import java.util.Map;\r
import java.util.Set;\r
import java.util.UUID;\r
\r
import eu.etaxonomy.cdm.io.specimen.excel.in.SpecimenCdmExcelImportState;\r
import eu.etaxonomy.cdm.io.specimen.excel.in.SpecimenRow;\r
import eu.etaxonomy.cdm.model.common.CdmBase;\r
-import eu.etaxonomy.cdm.model.common.DefinedTermBase;\r
import eu.etaxonomy.cdm.model.common.Extension;\r
import eu.etaxonomy.cdm.model.common.ExtensionType;\r
import eu.etaxonomy.cdm.model.common.IdentifiableEntity;\r
protected static final String LANGUAGE = "(?i)(Language)";\r
\r
@Override\r
- protected void analyzeRecord(HashMap<String, String> record, STATE state) {\r
+ protected void analyzeRecord(Map<String, String> record, STATE state) {\r
Set<String> keys = record.keySet();\r
\r
ROW row = createDataHolderRow();\r
* @param keyValue\r
* @return\r
*/\r
- protected KeyValue makeKeyValue(HashMap<String, String> record, String originalKey, STATE state) {\r
+ protected KeyValue makeKeyValue(Map<String, String> record, String originalKey, STATE state) {\r
KeyValue keyValue = new KeyValue();\r
keyValue.originalKey = originalKey;\r
String indexedKey = CdmUtils.removeDuplicateWhitespace(originalKey.trim()).toString();\r
\r
protected boolean analyzeFeatures(STATE state, KeyValue keyValue) {\r
String key = keyValue.key;\r
- Pager<DefinedTermBase> features = getTermService().findByTitleWithRestrictions(Feature.class, key, null, null, null, null, null, null);\r
+ Pager<Feature> features = getTermService().findByTitleWithRestrictions(Feature.class, key, null, null, null, null, null, null);\r
\r
if (features.getCount() > 1){\r
String message = "More than one feature found matching key " + key;\r
}else if (features.getCount() == 0){\r
return false;\r
}else{\r
- Feature feature = CdmBase.deproxy(features.getRecords().get(0), Feature.class);\r
+ Feature feature = CdmBase.deproxy(features.getRecords().get(0));\r
ROW row = state.getCurrentRow();\r
if ( keyValue.isKeyData()){\r
row.putFeature(feature.getUuid(), keyValue.index, keyValue.value);\r
if (logger.isDebugEnabled()) { logger.debug("Importing distribution data"); }\r
\r
// read and save all rows of the excel worksheet\r
- ArrayList<HashMap<String, String>> recordList;\r
+ List<Map<String, String>> recordList;\r
URI source = state.getConfig().getSource();\r
try{\r
recordList = ExcelUtils.parseXLS(source);\r
return;\r
}\r
if (recordList != null) {\r
- HashMap<String,String> record = null;\r
+ Map<String,String> record = null;\r
TransactionStatus txStatus = startTransaction();\r
\r
for (int i = 0; i < recordList.size(); i++) {\r
/**\r
* Reads the data of one Excel sheet row\r
*/\r
- private void analyzeRecord(HashMap<String,String> record) {\r
+ private void analyzeRecord(Map<String,String> record) {\r
/*\r
* Relevant columns:\r
* Name (EDIT)\r
/**
* @author a.babadshanjan
* @since 09.01.2009
- * @version 1.0
*/
public abstract class TaxonExcelImportBase
-extends ExcelTaxonOrSpecimenImportBase<TaxonExcelImportState, ExcelImportConfiguratorBase, ExcelRowBase> {
- @SuppressWarnings("unused")
+ extends ExcelTaxonOrSpecimenImportBase<TaxonExcelImportState, ExcelImportConfiguratorBase, ExcelRowBase> {
+
+ @SuppressWarnings("unused")
private static final Logger logger = Logger.getLogger(TaxonExcelImportBase.class);
/*
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
-import java.util.HashMap;
import java.util.List;
+import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
* {@inheritDoc}
*/
@Override
- protected void analyzeRecord(HashMap<String, String> record, MediaExcelImportState state) {
+ protected void analyzeRecord(Map<String, String> record, MediaExcelImportState state) {
// do nothing
}
*/
@Override
protected void firstPass(MediaExcelImportState state) {
- HashMap<String, String> record = state.getOriginalRecord();
+ Map<String, String> record = state.getOriginalRecord();
String line = "row " + state.getCurrentLine() + ": ";
String linePure = "row " + state.getCurrentLine();
System.out.println(linePure);
//date
String dateStr = record.get(COL_DATE);
- if (isNotBlank(artistStr)){
+ if (isNotBlank(dateStr)){
TimePeriod timePeriod = TimePeriodParser.parseString(dateStr);
if (timePeriod.getFreeText()!= null){
String message = "Date could not be parsed: %s";
message = String.format(message, dateStr);
state.getResult().addWarning(message, null, line);
}
- if (timePeriod.getEnd() != null){
- String message = "Date is a period with an end date. Periods are currently not yet supported: %s";
- message = String.format(message, dateStr);
- state.getResult().addWarning(message, null, line);
- }
- Partial start = timePeriod.getStart();
- DateTime dateTime = toDateTime(state, start, dateStr, line);
- media.setMediaCreated(TimePeriod.NewInstance(dateTime));
+ media.setMediaCreated(timePeriod);
}
//URLs
*/
private List<URI> getUrls(MediaExcelImportState state, String line) {
List<URI> list = new ArrayList<>();
- HashMap<String, String> record = state.getOriginalRecord();
+ Map<String, String> record = state.getOriginalRecord();
for (String str : record.keySet()){
if (str.equals("url") || str.matches("url_size\\d+") ){
String url = record.get(str);
}
Person result = (Person)getDeduplicationHelper(state).getExistingAuthor(null, person);
- return person;
+ return result;
}
/**
name.addTypeDesignation(designation, true);
}
}
+ save(state.getDerivedUnitBase(), state);
for (String[] fullReference : state.getDataHolder().getReferenceList()) {
* @param collectorNames
*/
public UnitsGatheringEvent(ITermService termService, String locality, String languageIso, Double longitude,
- Double latitude, String elevationText, String elevationMin, String elevationMax, String elevationUnit,
+ Double latitude, String errorRadius, String elevationText, String elevationMin, String elevationMax, String elevationUnit,
String date, String gatheringNotes, String gatheringMethod, ReferenceSystem referenceSystem,
Abcd206ImportConfigurator config) {
this.setLocality(termService, locality, languageIso);
- this.setCoordinates(longitude, latitude, referenceSystem);
+
+        // NOTE: Integer.getInteger(String) looks up a *system property* named
+        // after the argument; the ABCD errorRadius value must be parsed instead.
+        Integer errorRadiusInt = null;
+        if (errorRadius != null){
+            try {
+                errorRadiusInt = Integer.valueOf(errorRadius.trim());
+            } catch (NumberFormatException e) {
+                // no parsable error radius given -> leave it unset
+            }
+        }
+
+        this.setCoordinates(longitude, latitude, referenceSystem, errorRadiusInt);
this.setDate(date);
this.setNotes(gatheringNotes);
this.setElevation(elevationText, elevationMin, elevationMax, elevationUnit);
* @param: latitude
*/
public void setCoordinates(Double longitude, Double latitude){
- setCoordinates(longitude, latitude, null);
+ setCoordinates(longitude, latitude, null, null);
}
- public void setCoordinates(Double longitude, Double latitude, ReferenceSystem referenceSystem){
+ public void setCoordinates(Double longitude, Double latitude, ReferenceSystem referenceSystem, Integer errorRadius){
//create coordinates point
if((longitude == null) || (latitude == null)){
return;
if (latitude != 0.0) {
coordinates.setLatitude(latitude);
}
+        // guard against null before comparing to avoid NPE on auto-unboxing
+        if (errorRadius != null && errorRadius != 0) {
+            coordinates.setErrorRadius(errorRadius);
+        }
coordinates.setReferenceSystem(referenceSystem);
this.gatheringEvent.setExactLocation(coordinates);
package eu.etaxonomy.cdm.io.specimen.abcd206.in;
+import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URI;
import eu.etaxonomy.cdm.api.application.ICdmRepository;
import eu.etaxonomy.cdm.api.facade.DerivedUnitFacade;
+import eu.etaxonomy.cdm.common.StreamUtils;
import eu.etaxonomy.cdm.ext.occurrence.bioCase.BioCaseQueryServiceWrapper;
import eu.etaxonomy.cdm.hibernate.HibernateProxyHelper;
import eu.etaxonomy.cdm.io.common.ICdmIO;
import eu.etaxonomy.cdm.model.common.TermVocabulary;
import eu.etaxonomy.cdm.model.location.NamedArea;
import eu.etaxonomy.cdm.model.media.Media;
+import eu.etaxonomy.cdm.model.media.Rights;
+import eu.etaxonomy.cdm.model.media.RightsType;
import eu.etaxonomy.cdm.model.molecular.DnaSample;
import eu.etaxonomy.cdm.model.occurrence.Collection;
import eu.etaxonomy.cdm.model.occurrence.DerivationEvent;
import eu.etaxonomy.cdm.model.reference.Reference;
import eu.etaxonomy.cdm.model.reference.ReferenceFactory;
import eu.etaxonomy.cdm.model.taxon.Classification;
+import eu.etaxonomy.cdm.strategy.parser.TimePeriodParser;
/**
* @author p.kelbert
} finally {
state.getReport().printReport(state.getConfig().getReportUri());
}
-
if (state.getConfig().isDownloadSequenceData()) {
- // download fasta files for imported sequences
- // TODO: where to store the files and how to create the new Blast DB
+ for (URI uri:state.getSequenceDataStableIdentifier()){
+ // Files.createDirectories(file.getParent()); // optional, make sure parent dir exists
+ try {
+ StreamUtils.downloadFile(uri.toURL(), "temp");
+ } catch (IOException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+
+ }
}
+
return;
}
// gathering event
UnitsGatheringEvent unitsGatheringEvent = new UnitsGatheringEvent(cdmAppController.getTermService(),
state.getDataHolder().locality, state.getDataHolder().languageIso, state.getDataHolder().longitude,
- state.getDataHolder().latitude, state.getDataHolder().getGatheringElevationText(),
+ state.getDataHolder().latitude, state.getDataHolder().getGatheringCoordinateErrorMethod() , state.getDataHolder().getGatheringElevationText(),
state.getDataHolder().getGatheringElevationMin(), state.getDataHolder().getGatheringElevationMax(),
state.getDataHolder().getGatheringElevationUnit(), state.getDataHolder().getGatheringDateText(),
state.getDataHolder().getGatheringNotes(), state.getDataHolder().getGatheringMethod(),
}
}
+ if (attributes.containsKey("CreateDate")) {
+ String createDate = attributes.get("CreateDate");
+
+ if (createDate != null) {
+
+ media.setMediaCreated(TimePeriodParser.parseString(createDate));
+ }
+
+ }
+
+
+ if (attributes.containsKey("License")) {
+ String licence = attributes.get("License");
+
+ if (licence != null) {
+ Rights right = Rights.NewInstance(licence, Language.ENGLISH(), RightsType.LICENSE());
+ media.addRights(right);
+ }
+
+ }
+
+
derivedUnitFacade.addDerivedUnitMedia(media);
if (state.getConfig().isAddMediaAsMediaSpecimen()) {
type = SpecimenOrObservationType.PreservedSpecimen;
} else if (state.getDataHolder().getRecordBasis().toLowerCase().startsWith("o")
|| state.getDataHolder().getRecordBasis().toLowerCase().indexOf("observation") > -1) {
- type = SpecimenOrObservationType.Observation;
+ if (state.getDataHolder().getRecordBasis().toLowerCase().contains("machine") && state.getDataHolder().getRecordBasis().toLowerCase().contains("observation")){
+ type = SpecimenOrObservationType.MachineObservation;
+ }else if (state.getDataHolder().getRecordBasis().toLowerCase().contains("human") && state.getDataHolder().getRecordBasis().toLowerCase().contains("observation")){
+ type = SpecimenOrObservationType.HumanObservation;
+ }else{
+ type = SpecimenOrObservationType.Observation;
+ }
} else if (state.getDataHolder().getRecordBasis().toLowerCase().indexOf("fossil") > -1) {
type = SpecimenOrObservationType.Fossil;
} else if (state.getDataHolder().getRecordBasis().toLowerCase().indexOf("materialsample") > -1) {
abcdFieldGetter.getGatheringImages(root);
abcdFieldGetter.getGatheringMethod(root);
abcdFieldGetter.getAssociatedUnitIds(root);
+ abcdFieldGetter.getPreparation(root);
abcdFieldGetter.getUnitNotes(root);
boolean referencefound = abcdFieldGetter.getReferences(root);
// if (!referencefound && state.getRef() != null) {
private Set<URI> sequenceDataStableIdentifier = new HashSet<>();
+
//****************** CONSTRUCTOR ***************************************************/
.equals(prefix + "SpecimenUnit")) {
types = results.item(k).getChildNodes();
for (int l = 0; l < types.getLength(); l++) {
+ /*
+ *<abcd21:NomenclaturalTypeDesignations>
+ *<abcd21:NomenclaturalTypeDesignation>
+ *<abcd21:TypeStatus>epitype</abcd21:TypeStatus>
+ *</abcd21:NomenclaturalTypeDesignation>
+ *</abcd21:NomenclaturalTypeDesignations>
+ */
if (types.item(l).getNodeName().equals(prefix+ "NomenclaturalTypeDesignations")) {
ntds = types.item(l).getChildNodes();
for (int m = 0; m < ntds.getLength(); m++) {
}
dataHolder.getStatusList().add(getSpecimenTypeDesignationStatusByKey(type));
typeFound=true;
- path = ntd.item(l).getNodeName();
- getHierarchie(ntd.item(l));
- dataHolder.knownABCDelements.add(path);
- path = "";
+ // path = ntd.item(n).getNodeName();
+ // getHierarchie(ntd.item(l));
+ // dataHolder.knownABCDelements.add(path);
+ // path = "";
}
}
}
}
}
} catch (NullPointerException e) {
+ System.err.println(e.getMessage());
dataHolder.setStatusList(new ArrayList<SpecimenTypeDesignationStatus>());
}
}
// logger.info("TMP NAME P" + tmpName);
dataHolder.getIdentificationList().add(new Identification(tmpName, preferredFlag, dataHolder.getNomenclatureCode(), identifierStr, dateStr));
} else {
- dataHolder.getIdentificationList().add(new Identification(tmpName, preferredFlag, dateStr));
+ dataHolder.getIdentificationList().add(new Identification(tmpName, preferredFlag,identifierStr, dateStr));
}
}
}
&& dataHolder.getNomenclatureCode() != "") {
dataHolder.getIdentificationList().add(new Identification(tmpName, "0", dataHolder.getNomenclatureCode(), null, dateStr));
} else {
- dataHolder.getIdentificationList().add(new Identification(tmpName, "0", dateStr));
+ dataHolder.getIdentificationList().add(new Identification(tmpName, "0", null, dateStr));
}
}
}
for (int k = 0; k < results.getLength(); k++) {
if (results.item(k).getNodeName().equals(prefix + "Identifier")) {
+ /*
+ * <abcd21:Identifiers>
+ * <abcd21:Identifier>
+ * <abcd21:PersonName>
+ * <abcd21:FullName>R. Jahn</abcd21:FullName>
+ * </abcd21:PersonName>
+ * </abcd21:Identifier>
+ * </abcd21:Identifiers>
+ */
identifier = results.item(k).getChildNodes();
for (int l = 0; l < identifier.getLength(); l++) {
if (identifier.item(l).getNodeName().equals(prefix + "PersonName")) {
} catch (NullPointerException e) {
dataHolder.latitude = null;
}
+            try {
+                group = root.getElementsByTagName(prefix + "CoordinateErrorDistanceInMeters");
+                path = group.item(0).getNodeName();
+                getHierarchie(group.item(0));
+                dataHolder.knownABCDelements.add(path);
+                path = "";
+                dataHolder.setGatheringCoordinateErrorMethod(group.item(0).getTextContent());
+            } catch (NullPointerException e) {
+                // element absent: reset the coordinate error field (not latitude,
+                // which was a copy-paste slip from the block above)
+                dataHolder.setGatheringCoordinateErrorMethod(null);
+            }
try {
group = root.getElementsByTagName(prefix + "Country");
childs = group.item(0).getChildNodes();
* @param root
*/
protected void getMultimedia(Element root) {
- NodeList group, multimedias, multimedia, creators, creator, copyRightNodes, iprNodes, textNodes, licences, copyrights;
+        // after extracting extractMediaInformation() only these locals remain in use
+        NodeList group, multimedias, multimedia;
try {
group = root.getElementsByTagName(prefix + "MultiMediaObjects");
for (int i = 0; i < group.getLength(); i++) {
if (multimedias.item(j).getNodeName().equals(prefix + "MultiMediaObject")) {
multimedia = multimedias.item(j).getChildNodes();
Map<String,String> mediaObjectMap = new HashMap<String, String>();
- String fileUri = "";
- for (int k = 0; k < multimedia.getLength(); k++) {
- if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "fileURI")) {
- fileUri = multimedia.item(k).getTextContent();
- mediaObjectMap.put("fileUri", fileUri);
- path = multimedia.item(k).getNodeName();
- getHierarchie(multimedia.item(k));
- dataHolder.knownABCDelements.add(path);
- path = "";
- } else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "context")) {
- mediaObjectMap.put("Context", multimedia.item(k).getTextContent());
- } else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "Creators")){
- String creatorString = "";
- creators = multimedia.item(k).getChildNodes();
- for (int l = 0; l < creators.getLength(); l++) {
-
- if (creators.item(l).getNodeName().equalsIgnoreCase(prefix + "Creator")){
- if (creatorString != ""){
- creatorString += ", ";
- }
- creatorString += creators.item(l).getTextContent();
+
+                        String fileUri = extractMediaInformation(multimedia, mediaObjectMap);
+                        // compare string content, not object identity
+                        if (!fileUri.isEmpty()){
+                            dataHolder.putMultiMediaObject(fileUri,mediaObjectMap);
+                        }
+ }
+ }
+ }
+ } catch (NullPointerException e) {
+ logger.info(e);
+ }
+ }
+
+
+    /**
+     * Extracts media related information (file URI, context, creators, dates,
+     * copyright and license texts) from the given multimedia child nodes into
+     * the passed map.
+     *
+     * @param multimedia child nodes of a MultiMediaObject/SiteImage element
+     * @param mediaObjectMap target map, keyed by attribute name
+     * @return the file URI of the media object, or "" if none was found
+     */
+ private String extractMediaInformation(NodeList multimedia, Map<String, String> mediaObjectMap) {
+ NodeList creators;
+ NodeList copyRightNodes;
+ NodeList iprNodes;
+ NodeList licenceNodes;
+ NodeList license;
+ NodeList copyrights;
+ String fileUri = "";
+ for (int k = 0; k < multimedia.getLength(); k++) {
+ if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "fileURI")) {
+ fileUri = multimedia.item(k).getTextContent();
+ mediaObjectMap.put("fileUri", fileUri);
+ path = multimedia.item(k).getNodeName();
+ getHierarchie(multimedia.item(k));
+ dataHolder.knownABCDelements.add(path);
+ path = "";
+ } else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "context")) {
+ mediaObjectMap.put("Context", multimedia.item(k).getTextContent());
+ } else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "createDate")) {
+ mediaObjectMap.put("CreateDate", multimedia.item(k).getTextContent());
+ }else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "Creators")){
+ String creatorString = "";
+ creators = multimedia.item(k).getChildNodes();
+ for (int l = 0; l < creators.getLength(); l++) {
+
+ if (creators.item(l).getNodeName().equalsIgnoreCase(prefix + "Creator")){
+                        if (!creatorString.isEmpty()){
+ creatorString += ", ";
+ }
+ creatorString += creators.item(l).getTextContent();
+ }
+ }
+ mediaObjectMap.put("Creators",creatorString);
+ } else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "Creator")){
+ mediaObjectMap.put("Creators",multimedia.item(k).getTextContent());
+ } else if (multimedia.item(k).getNodeName().equals("CreatedDate")){
+ mediaObjectMap.put("CreatedDate",multimedia.item(k).getTextContent());
+ } else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "comment")){
+ mediaObjectMap.put("Comment",multimedia.item(k).getTextContent());
+ } else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "IPR")){
+ String copyRightString = "";
+ iprNodes = multimedia.item(k).getChildNodes();
+ for (int l = 0; l < iprNodes.getLength(); l++) {
+ if (iprNodes.item(l).getNodeName().equalsIgnoreCase(prefix + "Copyrights")){
+ copyRightNodes = iprNodes.item(l).getChildNodes();
+ for (int m = 0; m < copyRightNodes.getLength(); m++) {
+                            // fix: index with loop variable m, not outer index l
+                            if (copyRightNodes.item(m).getNodeName().equalsIgnoreCase(prefix + "Copyright")){
+                                copyrights = copyRightNodes.item(m).getChildNodes();
+ for (int n = 0; n < copyrights.getLength(); n++){
+ if (copyrights.item(n).getNodeName().equalsIgnoreCase(prefix + "text")){
+ mediaObjectMap.put("Copyright", copyrights.item(n).getTextContent());
}
}
- mediaObjectMap.put("Creators",creatorString);
- } else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "Creator")){
- mediaObjectMap.put("Creators",multimedia.item(k).getTextContent());
- } else if (multimedia.item(k).getNodeName().equals("CreatedDate")){
- mediaObjectMap.put("CreatedDate",multimedia.item(k).getTextContent());
- } else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "comment")){
- mediaObjectMap.put("Comment",multimedia.item(k).getTextContent());
- } else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "IPR")){
- String copyRightString = "";
- iprNodes = multimedia.item(k).getChildNodes();
- for (int l = 0; l < iprNodes.getLength(); l++) {
- if (iprNodes.item(l).getNodeName().equalsIgnoreCase(prefix + "Copyrights")){
- copyRightNodes = iprNodes.item(l).getChildNodes();
- for (int m = 0; m < copyRightNodes.getLength(); m++) {
- if (copyRightNodes.item(l).getNodeName().equalsIgnoreCase(prefix + "Copyright")){
- copyrights = copyRightNodes.item(l).getChildNodes();
- for (int n = 0; n < copyrights.getLength(); n++){
- if (copyrights.item(n).getNodeName().equalsIgnoreCase(prefix + "text")){
- //TODO: decide whether this is the copyright owner or a description text
- }
- }
+ }
+ }
+ } else if (iprNodes.item(l).getNodeName().equalsIgnoreCase(prefix + "Licenses")){
+ licenceNodes = iprNodes.item(l).getChildNodes();
+ for (int m = 0; m < licenceNodes.getLength(); m++) {
+                            // fix: index with loop variable m, not outer index l
+                            if (licenceNodes.item(m).getNodeName().equalsIgnoreCase(prefix + "License")){
+                                license = licenceNodes.item(m).getChildNodes();
+ for (int n = 0; n < license.getLength(); n++){
+ if (license.item(n).getNodeName().equalsIgnoreCase(prefix + "Text")){
+ mediaObjectMap.put("License", license.item(n).getTextContent());
+ }else{
+ Node node = license.item(n);
+ NodeList children = node.getChildNodes();
+ for (int o = 0; o < children.getLength(); o++){
+                                            // fix: index with loop variable o, not enclosing index n
+                                            if (children.item(o).getNodeName().equalsIgnoreCase(prefix + "Text")){
+                                                mediaObjectMap.put("License", children.item(o).getTextContent());
}
+
}
}
}
-
-
-
- // TODO: mediaObjectMap.put("IPR",multimedia.item(k).getTextContent());
}
-
- }
- if (fileUri != ""){
- dataHolder.putMultiMediaObject(fileUri,mediaObjectMap);
}
+ }else{
+                        // trace unexpected IPR child elements via the class logger instead of stderr
+                        if (logger.isDebugEnabled()){logger.debug(iprNodes.item(l).getNodeName());}
}
+
}
+
+ } else{
+                // trace unexpected multimedia child elements via the class logger instead of stderr
+                if (logger.isDebugEnabled()){logger.debug(multimedia.item(k).getNodeName());}
}
- } catch (NullPointerException e) {
- logger.info(e);
+
}
+ return fileUri;
}
protected void getAssociatedUnitIds(Element root) {
if (children.item(j).getNodeName().equals(prefix + "DateTime")) {
NodeList dateTimes = children.item(j).getChildNodes();
for (int k = 0; k < dateTimes.getLength(); k++) {
- if (dateTimes.item(k).getNodeName().equals(prefix + "DateText")) {
+ if (dateTimes.item(k).getNodeName().equals(prefix + "ISODateTimeBegin")) {
path = dateTimes.item(k).getNodeName();
getHierarchie(dateTimes.item(k));
dataHolder.knownABCDelements.add(path);
if (multimedias.item(j).getNodeName().equals(prefix + "SiteImage")) {
multimedia = multimedias.item(j).getChildNodes();
Map<String,String> mediaObjectMap = new HashMap<String, String>();
- String fileUri = "";
- for (int k = 0; k < multimedia.getLength(); k++) {
-
+ String fileUri = extractMediaInformation(multimedia, mediaObjectMap);
- if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "fileURI")) {
- fileUri = multimedia.item(k).getTextContent();
- mediaObjectMap.put("fileUri", fileUri);
- path = multimedia.item(k).getNodeName();
- getHierarchie(multimedia.item(k));
- dataHolder.knownABCDelements.add(path);
- path = "";
- } else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "Context")) {
- mediaObjectMap.put("Context", multimedia.item(k).getTextContent());
- }else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "Comment")) {
- mediaObjectMap.put("Comment", multimedia.item(k).getTextContent());
- } else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "Creators")){
- String creatorString = "";
- creators = multimedia.item(k).getChildNodes();
- for (int l = 0; l < creators.getLength(); l++) {
-
- if (creators.item(l).getNodeName().equalsIgnoreCase(prefix + "Creator")){
- if (creatorString != ""){
- creatorString += ", ";
- }
- creatorString += creators.item(l).getTextContent();
- }
- }
- mediaObjectMap.put("Creators",creatorString);
- } else if (multimedia.item(k).getNodeName().equalsIgnoreCase(prefix + "Creator")){
- mediaObjectMap.put("Creators",multimedia.item(k).getTextContent());
- }
- }
if (fileUri != ""){
dataHolder.putGatheringMultiMediaObject(fileUri,mediaObjectMap);
}
private final String code;
private final String date;
- public Identification(String taxonName, String preferred, String date) {
- this(taxonName, preferred, null, null, date);
+ public Identification(String taxonName, String preferred, String identifier, String date) {
+ this(taxonName, preferred, null, identifier, date);
}
public Identification(String scientificName, String preferred, String code, String identifier, String date) {
import eu.etaxonomy.cdm.io.specimen.abcd206.in.AbcdParseUtility;
import eu.etaxonomy.cdm.io.specimen.abcd206.in.SpecimenImportReport;
import eu.etaxonomy.cdm.model.common.DefinedTerm;
-import eu.etaxonomy.cdm.model.common.DefinedTermBase;
import eu.etaxonomy.cdm.model.common.OrderedTerm;
import eu.etaxonomy.cdm.model.media.Media;
import eu.etaxonomy.cdm.model.molecular.Amplification;
if(markerList.item(0)!=null){
String amplificationMarker = markerList.item(0).getTextContent();
DefinedTerm dnaMarker = null;
- List<DefinedTermBase> markersFound = cdmAppController.getTermService().findByTitleWithRestrictions(DefinedTerm.class, amplificationMarker, MatchMode.EXACT, null, null, null, null, null).getRecords();
+ List<DefinedTerm> markersFound = cdmAppController.getTermService().findByTitleWithRestrictions(DefinedTerm.class, amplificationMarker, MatchMode.EXACT, null, null, null, null, null).getRecords();
if(markersFound.size()==1){
- dnaMarker = (DefinedTerm) markersFound.get(0);
+ dnaMarker = markersFound.get(0);
}
else{
dnaMarker = DefinedTerm.NewDnaMarkerInstance(amplificationMarker, amplificationMarker, amplificationMarker);
//consensus sequence
NodeList sequencingsList = amplificationElement.getElementsByTagName(prefix+"Sequencings");
- if(sequencingsList.item(0)!=null && sequencingsList.item(0) instanceof Element){
- parseAmplificationSequencings((Element)sequencingsList.item(0), amplification, amplificationResult, dnaSample, state);
+ if(sequencingsList.item(0)!=null) {
+ if ( sequencingsList.item(0) instanceof Element){
+ Element el = (Element)sequencingsList.item(0);
+ parseAmplificationSequencings(el, amplification, amplificationResult, dnaSample, state);
+ }
}
+
+
parseAmplificationPrimers(amplificationElement.getElementsByTagName(prefix+"AmplificationPrimers"));
}
}
//contig file URL
NodeList consensusSequenceChromatogramFileURIList = sequencing.getElementsByTagName(prefix+"consensusSequenceChromatogramFileURI");
URI uri = AbcdParseUtility.parseFirstUri(consensusSequenceChromatogramFileURIList, report);
- if (uri.toString().endsWith("fasta")){
+ if (uri != null && uri.toString().endsWith("fasta")){
state.putSequenceDataStableIdentifier(uri);
}else{
Media contigFile = Media.NewInstance(uri, null, null, null);
package eu.etaxonomy.cdm.io.specimen.excel.in;
-import java.util.HashMap;
+import java.util.Map;
import java.util.Set;
import java.util.UUID;
}
@Override
- protected void analyzeRecord(HashMap<String, String> record, SpecimenCdmExcelImportState state) {
+ protected void analyzeRecord(Map<String, String> record, SpecimenCdmExcelImportState state) {
Set<String> keys = record.keySet();
NamedAreaLevellRow row = new NamedAreaLevellRow();
}
@Override
- protected String getWorksheetName() {
+ protected String getWorksheetName(SpecimenCdmExcelImportConfigurator config) {
return WORKSHEET_NAME;
}
package eu.etaxonomy.cdm.io.specimen.excel.in;
-import java.util.HashMap;
+import java.util.Map;
import java.util.Set;
import java.util.UUID;
}
@Override
- protected void analyzeRecord(HashMap<String, String> record, SpecimenCdmExcelImportState state) {
+ protected void analyzeRecord(Map<String, String> record, SpecimenCdmExcelImportState state) {
Set<String> keys = record.keySet();
NamedAreaLevellRow row = new NamedAreaLevellRow();
}
@Override
- protected String getWorksheetName() {
+ protected String getWorksheetName(SpecimenCdmExcelImportConfigurator config) {
return WORKSHEET_NAME;
}
private static final String REFERENCE_SYSTEM_COLUMN = "(?i)(ReferenceSystem)";
private static final String ERROR_RADIUS_COLUMN = "(?i)(ErrorRadius)";
-
private static final String COLLECTORS_NUMBER_COLUMN = "(?i)((Collectors|Field)Number)";
private static final String ECOLOGY_COLUMN = "(?i)(Ecology|Habitat)";
private static final String PLANT_DESCRIPTION_COLUMN = "(?i)(PlantDescription)";
private static final String FIELD_NOTES_COLUMN = "(?i)(FieldNotes)";
private static final String SEX_COLUMN = "(?i)(Sex)";
-
private static final String ACCESSION_NUMBER_COLUMN = "(?i)(AccessionNumber)";
private static final String BARCODE_COLUMN = "(?i)(Barcode)";
private static final String COLLECTION_CODE_COLUMN = "(?i)(CollectionCode)";
private static final String COLLECTION_COLUMN = "(?i)(Collection)";
private static final String UNIT_NOTES_COLUMN = "(?i)((Unit)?Notes)";
-
private static final String TYPE_CATEGORY_COLUMN = "(?i)(TypeCategory)";
private static final String TYPIFIED_NAME_COLUMN = "(?i)(TypifiedName|TypeOf)";
-
private static final String SOURCE_COLUMN = "(?i)(Source)";
private static final String ID_IN_SOURCE_COLUMN = "(?i)(IdInSource)";
-
private static final String DETERMINATION_AUTHOR_COLUMN = "(?i)(Author)";
private static final String DETERMINATION_MODIFIER_COLUMN = "(?i)(DeterminationModifier)";
private static final String DETERMINED_BY_COLUMN = "(?i)(DeterminationBy)";
private static final String DETERMINATION_NOTES_COLUMN = "(?i)(DeterminationNote)";
private static final String EXTENSION_COLUMN = "(?i)(Ext(ension)?)";
-
public SpecimenCdmExcelImport() {
super();
}
-
-
-
@Override
protected void analyzeSingleValue(KeyValue keyValue, SpecimenCdmExcelImportState state) {
SpecimenRow row = state.getCurrentRow();
}else{
logger.warn("Extension without postfix not yet implemented");
}
-
}else {
state.setUnsuccessfull();
logger.error("Unexpected column header " + keyValue.originalKey);
return;
}
-
@Override
protected void firstPass(SpecimenCdmExcelImportState state) {
SpecimenRow row = state.getCurrentRow();
}
DerivedUnitFacade facade = DerivedUnitFacade.NewInstance(type);
-
Language lang = Language.DEFAULT();
if (StringUtils.isNotBlank(row.getLanguage())){
Language langIso = getTermService().getLanguageByIso(row.getLanguage());
return;
}
-
-
}
private void handleAreas(DerivedUnitFacade facade, SpecimenRow row, SpecimenCdmExcelImportState state) {
}
}
-
for (DeterminationLight determinationLight : row.getDetermination()){
Taxon taxon;
if (! hasCommonTaxonInfo){
if (state.getConfig().isUseMaterialsExaminedForIndividualsAssociations()){
feature = Feature.MATERIALS_EXAMINED();
}
-
indivAssociciation.setFeature(feature);
}
if (state.getConfig().isDeterminationsAreDeterminationEvent()){
//name
INonViralName name;
- INonViralNameParser parser = NonViralNameParserImpl.NewInstance();
+ INonViralNameParser<INonViralName> parser = NonViralNameParserImpl.NewInstance();
NomenclaturalCode nc = state.getConfig().getNomenclaturalCode();
if (StringUtils.isNotBlank(commonDetermination.fullName)){
name = parser.parseFullName(commonDetermination.fullName, nc, rank);
//return
return taxon;
-
}
-
-
-
private void setAuthorship(INonViralName name, String author, INonViralNameParser<INonViralName> parser) {
if (name.isBotanical() || name.isZoological()){
try {
}
}
-
-
/**
* This method tries to find the best matching taxon depending on the import configuration,
* the taxon name information and the concept information available.
*
- *
* @param state
* @param determinationLight
* @param createIfNotExists
if (StringUtils.isNotBlank(computedTitleCache)){
titleCache = computedTitleCache;
}
-
}
return titleCache;
}
}else{
return null;
}
-
}
-
private DeterminationEvent makeDeterminationEvent(SpecimenCdmExcelImportState state, DeterminationLight determination, Taxon taxon) {
DeterminationEvent event = DeterminationEvent.NewInstance();
//taxon
return null;
}
AgentBase<?> collector = facade.getCollector();
- List<Person> collectors = new ArrayList<Person>();
+ List<Person> collectors = new ArrayList<>();
if (collector.isInstanceOf(Team.class) ){
Team team = CdmBase.deproxy(collector, Team.class);
collectors.addAll(team.getTeamMembers());
return result;
}
-
-
private Collection getOrMakeCollection(SpecimenCdmExcelImportState state, String collectionCode, String collectionString) {
Collection result = state.getCollection(collectionCode);
if (result == null){
NonViralNameParserImpl parser = NonViralNameParserImpl.NewInstance();
NomenclaturalCode code = state.getConfig().getNomenclaturalCode();
result = (TaxonName)parser.parseFullName(name, code, null);
-
}
if (result != null){
state.putName(name, result);
} catch (UndefinedTransformerMethodException e) {
throw new RuntimeException("getSpecimenTypeDesignationStatusByKey not yet implemented");
}
-
-
}
}
}
-
-
// lat/ long /error
try {
String longitude = row.getLongitude();
String message = "Problems when parsing exact location for line %d";
message = String.format(message, state.getCurrentLine());
logger.warn(message);
-
}
-
-
-
}
-
/*
* Set the current Country
* Search in the DB if the isoCode is known
@Override
- protected String getWorksheetName() {
+ protected String getWorksheetName(SpecimenCdmExcelImportConfigurator config) {
return WORKSHEET_NAME;
}
return false;
}
-
- /* (non-Javadoc)
- * @see eu.etaxonomy.cdm.io.excel.common.ExcelTaxonOrSpecimenImportBase#createDataHolderRow()
- */
@Override
protected SpecimenRow createDataHolderRow() {
return new SpecimenRow();
}
-
-
-
- /* (non-Javadoc)
- * @see eu.etaxonomy.cdm.io.common.CdmIoBase#doCheck(eu.etaxonomy.cdm.io.common.IoStateBase)
- */
@Override
protected boolean doCheck(SpecimenCdmExcelImportState state) {
logger.warn("Validation not yet implemented for " + this.getClass().getSimpleName());
return true;
}
-
-
@Override
protected boolean isIgnore(SpecimenCdmExcelImportState state) {
return !state.getConfig().isDoSpecimen();
}
-
-
}
refreshTransaction();
URI source = state.getConfig().getSource();
- ArrayList<HashMap<String,String>> unitsList = null;
+ List<Map<String,String>> unitsList = null;
try{
unitsList = ExcelUtils.parseXLS(source);
logger.info("unitslist : "+unitsList.size());
specimenOrObs = getOccurrenceService().listByAssociatedTaxon(null, null, taxon, null, null, null, null, null);
}
- HashMap<String,String> unit=null;
+ Map<String,String> unit=null;
MyHashMap<String,String> myunit;
for (int i=0; i<unitsList.size();i++){
// for (int i=0; i<10;i++){
* @param unitsList
* @param state
*/
- private void prepareCollectors(ArrayList<HashMap<String, String>> unitsList, SpecimenSynthesysExcelImportState state) {
+ private void prepareCollectors(List<Map<String, String>> unitsList, SpecimenSynthesysExcelImportState state) {
System.out.println("PREPARE COLLECTORS");
- List<String> collectors = new ArrayList<String>();
- List<String> teams = new ArrayList<String>();
- List<List<String>> collectorinteams = new ArrayList<List<String>>();
+ List<String> collectors = new ArrayList<>();
+ List<String> teams = new ArrayList<>();
+ List<List<String>> collectorinteams = new ArrayList<>();
String tmp;
- for (HashMap<String,String> unit : unitsList){
+ for (Map<String,String> unit : unitsList){
tmp=null;
tmp = unit.get("collector");
if (tmp != null && !tmp.isEmpty()) {
ResultWrapper<Boolean> success = ResultWrapper.NewInstance(true);\r
String childName;\r
boolean obligatory;\r
- String idNamespace = "TaxonRelation";\r
+ String idNamespace = "TaxonRelationDTO";\r
\r
TcsXmlImportConfigurator config = state.getConfig();\r
Element elDataSet = super.getDataSetElement(config);\r
\r
sddImport.doInvoke(new SDDImportState(importConfigurator));\r
\r
- logger.setLevel(Level.DEBUG);\r
+ //logger.setLevel(Level.DEBUG);\r
commitAndStartNewTransaction(new String[]{"DEFINEDTERMBASE"});\r
logger.setLevel(Level.DEBUG);\r
\r
<parent>
<artifactId>cdmlib-parent</artifactId>
<groupId>eu.etaxonomy</groupId>
- <version>5.2.0</version>
+ <version>5.3.0</version>
<relativePath>../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
--- /dev/null
+/**
+* Copyright (C) 2018 EDIT
+* European Distributed Institute of Taxonomy
+* http://www.e-taxonomy.eu
+*
+* The contents of this file are subject to the Mozilla Public License Version 1.1
+* See LICENSE.TXT at the top of this package for the full license terms.
+*/
+package eu.etaxonomy.cdm.exception;
+
+/**
+ * This Exception is for throwing an exception if a filter prevents a method
+ * from returning any value except an empty one.
+ * This is because the filter condition itself can never be <code>true</code>
+ * (e.g. because a given subtree ID does not exist in the database at all)
+ * or because none of the data matches the filter.
+ * <BR>
+ * In the first case it is recommended to set the invalidFilter parameter to <code>true</code>
+ * to indicate that the filter itself might be not correct.
+ *
+ * @author a.mueller
+ * @since 14.09.2018
+ */
+public class FilterException extends Exception {
+
+    private static final long serialVersionUID = 7491596488082796101L;
+
+    /**
+     * If <code>true</code>, indicates that the filter condition itself can
+     * never match (e.g. a given subtree ID does not exist in the database),
+     * as opposed to a valid filter that simply matched no data.
+     */
+    private boolean invalidFilter;
+
+    /**
+     * @param invalidFilter whether the filter itself is considered invalid
+     */
+    public FilterException(boolean invalidFilter) {
+        super();
+        // assign the field directly: invoking the overridable setter
+        // from a constructor is unsafe if the class is subclassed
+        this.invalidFilter = invalidFilter;
+    }
+
+    /**
+     * @param message the detail message
+     * @param invalidFilter whether the filter itself is considered invalid
+     */
+    public FilterException(String message, boolean invalidFilter) {
+        super(message);
+        this.invalidFilter = invalidFilter;
+    }
+
+    /**
+     * @param cause the underlying cause
+     * @param invalidFilter whether the filter itself is considered invalid
+     */
+    public FilterException(Throwable cause, boolean invalidFilter) {
+        super(cause);
+        this.invalidFilter = invalidFilter;
+    }
+
+    /**
+     * @param message the detail message
+     * @param cause the underlying cause
+     * @param invalidFilter whether the filter itself is considered invalid
+     */
+    public FilterException(String message, Throwable cause, boolean invalidFilter) {
+        super(message, cause);
+        this.invalidFilter = invalidFilter;
+    }
+
+    public boolean isInvalidFilter() {
+        return invalidFilter;
+    }
+
+    public void setInvalidFilter(boolean invalidFilter) {
+        this.invalidFilter = invalidFilter;
+    }
+
+}
/**
* Factory class that instantiates a matching ICdmFormatter for the given object
* and configures the format according to the given formatKeys.
- *
+ *
* @author pplitzner
* @since Nov 30, 2015
*
/**
* Returns a matching ICdmFormatter for the given object configured with the
* given formatKeys
- *
+ *
* @param object
* the object which should be formatted as a string
* @param formatKeys
/**
* Convenience method which directly formats the given object according to
* the given formatKeys.
- *
+ *
* @param object
* the object which should be formatted as a string
* @param formatKeys
import java.util.ArrayList;
import java.util.List;
-import org.codehaus.plexus.util.StringUtils;
+import org.apache.commons.lang.StringUtils;
import eu.etaxonomy.cdm.common.CdmUtils;
+import eu.etaxonomy.cdm.common.UTF8;
import eu.etaxonomy.cdm.model.agent.Person;
import eu.etaxonomy.cdm.model.agent.Team;
import eu.etaxonomy.cdm.model.agent.TeamOrPersonBase;
*/
public class TaxonRelationshipFormatter {
+ /**
+ * Marker prefixed to the name of a doubtful taxon: '?' followed by a narrow no-break space.
+ */
+ private static final String DOUBTFUL_TAXON_MARKER = "?" + UTF8.NARROW_NO_BREAK;
private static final String REL_SEC = ", rel. sec. ";
private static final String ERR_SEC = ", err. sec. ";
private static final String SYN_SEC = ", syn. sec. ";
private static final String UNDEFINED_SYMBOL = "??"; //TODO
public List<TaggedText> getTaggedText(TaxonRelationship taxonRelationship, boolean reverse, List<Language> languages) {
+ return getTaggedText(taxonRelationship, reverse, languages, false);
+ }
+
+
+
+ public List<TaggedText> getTaggedText(TaxonRelationship taxonRelationship, boolean reverse, List<Language> languages, boolean withoutName) {
if (taxonRelationship == null){
return null;
if (relatedTaxon == null){
return null;
}
- boolean isDoubtful = taxonRelationship.isDoubtful() || relatedTaxon.isDoubtful();
- String doubtfulStr = isDoubtful ? "?" : "";
+
+ String doubtfulTaxonStr = relatedTaxon.isDoubtful() ? DOUBTFUL_TAXON_MARKER : "";
+ String doubtfulRelationStr = taxonRelationship.isDoubtful() ? "?" : "";
+
TaxonName name = relatedTaxon.getName();
-// List<TaggedText> tags = new ArrayList<>();
TaggedTextBuilder builder = new TaggedTextBuilder();
//rel symbol
- String symbol = getSymbol(type, reverse, languages);
+ String symbol = doubtfulRelationStr + getSymbol(type, reverse, languages);
builder.add(TagEnum.symbol, symbol);
//name
- if (isMisapplied){
- //starting quote
- String startQuote = " " + doubtfulStr + QUOTE_START;
- builder.addSeparator(startQuote);// .add(TaggedText.NewSeparatorInstance(startQuote));
-
- //name cache
- List<TaggedText> nameCacheTags = getNameCacheTags(name);
- builder.addAll(nameCacheTags);
-
- //end quote
- String endQuote = QUOTE_END;
- builder.add(TagEnum.postSeparator, endQuote);
+ if (!withoutName){
+ if (isMisapplied){
+ //starting quote
+ String startQuote = " " + doubtfulTaxonStr + QUOTE_START;
+ builder.addSeparator(startQuote);
+
+ //name cache
+ List<TaggedText> nameCacheTags = getNameCacheTags(name);
+ builder.addAll(nameCacheTags);
+
+ //end quote
+ String endQuote = QUOTE_END;
+ builder.add(TagEnum.postSeparator, endQuote);
+ }else{
+ builder.addSeparator(" " + doubtfulTaxonStr);
+ //name full title cache
+ List<TaggedText> nameCacheTags = getNameTitleCacheTags(name);
+ builder.addAll(nameCacheTags);
+ }
}else{
- builder.addSeparator(" " + doubtfulStr);
- //name full title cache
- List<TaggedText> nameCacheTags = getNameTitleCacheTags(name);
- builder.addAll(nameCacheTags);
+ if (isNotBlank(doubtfulTaxonStr)){
+ builder.addSeparator(" " + doubtfulTaxonStr);
+ }
}
-
- //sensu (+ Separatoren?)
+ //sec/sensu (+ Separatoren?)
if (isNotBlank(relatedTaxon.getAppendedPhrase())){
builder.addWhitespace();
builder.add(TagEnum.appendedPhrase, relatedTaxon.getAppendedPhrase());
}
- List<TaggedText> secTags = getSensuTags(relatedTaxon.getSec(), relatedTaxon.getSecMicroReference(),
+ List<TaggedText> secTags = getReferenceTags(relatedTaxon.getSec(), relatedTaxon.getSecMicroReference(),
/* isMisapplied,*/ false);
if (!secTags.isEmpty()) {
builder.addSeparator(isMisapplied? SENSU_SEPARATOR : SEC_SEPARATOR);
// //, non author
if (isMisapplied && name != null){
- if (name.getCombinationAuthorship() != null && isNotBlank(name.getCombinationAuthorship().getNomenclaturalTitle())){
- builder.addSeparator(NON_SEPARATOR);
- builder.add(TagEnum.authors, name.getCombinationAuthorship().getNomenclaturalTitle());
- }else if (isNotBlank(name.getAuthorshipCache())){
+ if (isNotBlank(name.getAuthorshipCache())){
builder.addSeparator(NON_SEPARATOR);
builder.add(TagEnum.authors, name.getAuthorshipCache().trim());
}
}
- List<TaggedText> relSecTags = getSensuTags(taxonRelationship.getCitation(),
+ List<TaggedText> relSecTags = getReferenceTags(taxonRelationship.getCitation(),
taxonRelationship.getCitationMicroReference(),true);
if (!relSecTags.isEmpty()){
builder.addSeparator(isSynonym ? SYN_SEC : isMisapplied ? ERR_SEC : REL_SEC);
return builder.getTaggedText();
}
- private List<TaggedText> getSensuTags(Reference ref, String detail, /*boolean isSensu,*/ boolean isRelation) {
+ private List<TaggedText> getReferenceTags(Reference ref, String detail, /*boolean isSensu,*/ boolean isRelation) {
List<TaggedText> result = new ArrayList<>();
String secRef;
Team team = CdmBase.deproxy(author, Team.class);
String result = null;
int n = team.getTeamMembers().size();
- int index = 0;
+ int index = 1;
if (team.isHasMoreMembers()){
n++;
}
import eu.etaxonomy.cdm.model.taxon.Synonym;
import eu.etaxonomy.cdm.model.taxon.Taxon;
import eu.etaxonomy.cdm.model.taxon.TaxonBase;
+import eu.etaxonomy.cdm.model.taxon.TaxonNode;
/**
* Lucene index class bridge which sets the uuids of the accepted taxon for the
public final static String DOC_KEY_UUID_SUFFIX = ".uuid";
public static final String DOC_KEY_ID_SUFFIX = ".id";
public final static String DOC_KEY_PUBLISH_SUFFIX = ".publish";
-
+ public final static String DOC_KEY_TREEINDEX = "taxonNodes.treeIndex";
+ public final static String DOC_KEY_CLASSIFICATION_ID = "taxonNodes.classification.id";
+ public final static String ACC_TAXON = "accTaxon"; //there are probably still some places not using this constant, but for renaming in future we should try to use it everywhere
@Override
public void set(String name, Object value, Document document,
LuceneOptions luceneOptions) {
String accTaxonUuid = "";
+ boolean isSynonym = false;
Taxon accTaxon;
if(value instanceof Taxon){
accTaxon = (Taxon)value;
}else if (value instanceof Synonym){
accTaxon = ((Synonym)value).getAcceptedTaxon();
+ isSynonym = true;
}else{
throw new RuntimeException("Unhandled taxon base class: " + value.getClass().getSimpleName());
}
luceneOptions.getStore()
);
document.add(accPublishField);
+
+ //treeIndex + Classification
+ if (isSynonym && ACC_TAXON.equals(name)){
+ for (TaxonNode node : accTaxon.getTaxonNodes()){
+ //treeIndex
+ Field treeIndexField;
+ if (node.treeIndex()!= null){ //TODO find out why this happens in TaxonServiceSearchTest.testFindByDescriptionElementFullText_modify_Classification
+ treeIndexField = new StringField(DOC_KEY_TREEINDEX,
+ node.treeIndex(),
+ luceneOptions.getStore()
+ );
+ document.add(treeIndexField);
+ }
+
+ //classification
+ if (node.getClassification() != null){ //should never be null, but who knows
+ Field classificationIdField = new StringField(DOC_KEY_CLASSIFICATION_ID,
+ Integer.toString(node.getClassification().getId()),
+ luceneOptions.getStore()
+ );
+ document.add(classificationIdField);
+ }
+ }
+ }
}
}
}
for(TaxonNode node : taxon.getTaxonNodes()){
if(node.getClassification() != null){
- idFieldBridge.set(name + "taxon.taxonNodes.classification.id", node.getClassification().getId(), document, idFieldOptions);
+ idFieldBridge.set(name + "taxon.taxonNodes.classification.id",
+ node.getClassification().getId(), document, idFieldOptions);
+ }
+ if(node.treeIndex() != null){
+ Field treeIndexField = new StringField("inDescription.taxon.taxonNodes.treeIndex",
+ node.treeIndex(),
+ Store.YES
+ );
+ document.add(treeIndexField);
}
}
}
*/
public class UuidBridge implements StringBridge {
+ @Override
public String objectToString(Object object) {
if(object != null) {
return ((UUID)object).toString();
public void put(CdmBase cdmEntity);
/**
- * load into the cache and return the entity from the cache. The the entity
- * might already exist in the cache. In case the entity in the cache might
- * get updated whereas the returned entity represents is the entity from the
+ * Load into the cache and return the entity from the cache. The entity
+ * might already exist in the cache. In this case the entity in the cache might
+ * get updated whereas the returned entity represents the entity from the
* cache not the <code>cdmEntity</code> passed to this method.
*
* @param cdmEntity
*/
public boolean exists(CdmBase cdmBase);
+ public void dispose();
+
}
import javax.persistence.Embedded;
import javax.persistence.Entity;
+import javax.persistence.Index;
+import javax.persistence.Table;
import javax.persistence.Transient;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import org.apache.log4j.Logger;
import org.hibernate.annotations.Cascade;
import org.hibernate.annotations.CascadeType;
-import org.hibernate.annotations.Index;
-import org.hibernate.annotations.Table;
import org.hibernate.envers.Audited;
import eu.etaxonomy.cdm.model.common.IIntextReferenceTarget;
})
@Entity
@Audited
-@Table(appliesTo="AgentBase", indexes = { @Index(name = "agentTitleCacheIndex", columnNames = { "titleCache" }) })
+@Table(name="AgentBase", indexes = { @Index(name = "agentTitleCacheIndex", columnList = "titleCache") })
public abstract class AgentBase<S extends IIdentifiableEntityCacheStrategy<? extends AgentBase<S>>>
extends IdentifiableMediaEntity<S>
implements IMergable, IMatchable, IIntextReferenceTarget, Cloneable{
package eu.etaxonomy.cdm.model.common;
+import java.util.ArrayList;
import java.util.HashSet;
+import java.util.List;
import java.util.Set;
import java.util.UUID;
}
}
+ public void setAnnotations(Set<Annotation> annotations) {
+ List<Annotation> currentAnnotations = new ArrayList<>(annotations);
+ List<Annotation> annotationsSeen = new ArrayList<>();
+ for(Annotation a : annotations){
+ if(a == null){
+ continue;
+ }
+ if(!currentAnnotations.contains(a)){
+ addAnnotation(a);
+ }
+ annotationsSeen.add(a);
+ }
+ for(Annotation a : currentAnnotations){
+ if(!annotationsSeen.contains(a)){
+ removeAnnotation(a);
+ }
+ }
+ }
+
//********************** CLONE *****************************************/
/**\r
* Copyright (C) 2007 EDIT\r
-* European Distributed Institute of Taxonomy \r
+* European Distributed Institute of Taxonomy\r
* http://www.e-taxonomy.eu\r
-* \r
+*\r
* The contents of this file are subject to the Mozilla Public License Version 1.1\r
* See LICENSE.TXT at the top of this package for the full license terms.\r
*/\r
/**\r
* Common interface for all tree data structures supporting tree indexing.\r
* Mainly used by {@link SaveOrUpdateEventListener} to update the indices.\r
- * \r
+ *\r
* @author a.mueller\r
* @since 12.08.2013\r
*\r
*/\r
public interface ITreeNode<T extends ITreeNode<T>> extends ICdmBase {\r
- \r
+\r
//Constants\r
//the separator used in the tree index to separate the id's of the parent nodes\r
public static final String separator = "#";\r
- \r
+\r
//The prefix used in the tree index for the id of the tree itself\r
public static final String treePrefix = "t";\r
- \r
- \r
+\r
+\r
//METHODS\r
- \r
- \r
+\r
+\r
/**\r
* Returns the tree index of this tree node.\r
* @return the tree index\r
*/\r
public String treeIndex();\r
\r
- \r
+ /**\r
+ * Returns the tree index followed by an SQL wildcard '%'.\r
+ * @see #treeIndex()\r
+ * @see #treeIndexWc()\r
+ */\r
+ public String treeIndexLike();\r
+\r
+ /**\r
+ * Returns the tree index followed by a asterisk wildcard '*'.\r
+ * @see #treeIndex()\r
+ * @see #treeIndexLike()\r
+ */\r
+ public String treeIndexWc();\r
+\r
+\r
/**\r
* Returns the parent node of this node.\r
* Returns <code>null</code> if this\r
/**\r
* Sets the tree index of this node.\r
* @deprecated preliminary implementation for updating the treeindex.\r
- * Maybe removed once index updating is improved.\r
+ * May be removed once index updating is improved.\r
* @param newTreeIndex\r
*/\r
- public void setTreeIndex(String newTreeIndex);\r
- \r
+ @Deprecated\r
+ public void setTreeIndex(String newTreeIndex);\r
+\r
/**\r
* Returns all direct child nodes of this node.\r
* As tree node children do not necessarily need to be\r
* {@link List lists} the return type of this method may change\r
* to {@link Collection} in future. Therefore the use\r
* at the moment is deprecated.\r
- * @deprecated return type may become {@link Collection} in future \r
+ * @deprecated return type may become {@link Collection} in future\r
* @return the list of children\r
*/\r
- public List<T> getChildNodes();\r
+ @Deprecated\r
+ public List<T> getChildNodes();\r
\r
/**\r
* Returns the {@link ICdmBase#getId() id} of the tree object,\r
* use this anymore\r
* @return the id of the tree\r
*/\r
- public int treeId();\r
+ @Deprecated\r
+ public int treeId();\r
\r
}\r
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
+import javax.persistence.Index;
import javax.persistence.ManyToOne;
+import javax.persistence.Table;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
-import org.hibernate.annotations.Index;
-import org.hibernate.annotations.Table;
import org.hibernate.envers.Audited;
import org.hibernate.search.annotations.Field;
})
@Entity
@Audited
-@Table(appliesTo="Identifier", indexes = { @Index(name = "identifierIndex", columnNames = { "identifier" }) })
-public class Identifier<T extends IdentifiableEntity<?>> extends AnnotatableEntity implements Cloneable {
- private static final long serialVersionUID = 3337567049024506936L;
+@Table(name="Identifier", indexes = { @Index(name = "identifierIndex", columnList = "identifier") })
+public class Identifier<T extends IdentifiableEntity<?>>
+ extends AnnotatableEntity
+ implements Cloneable {
+
+ private static final long serialVersionUID = 3337567049024506936L;
@SuppressWarnings("unused")
private static final Logger logger = Logger.getLogger(Identifier.class);
import javax.persistence.Inheritance;
import javax.persistence.InheritanceType;
import javax.persistence.OneToMany;
+import javax.persistence.Table;
import javax.validation.constraints.NotNull;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import org.apache.log4j.Logger;
import org.hibernate.annotations.Cascade;
import org.hibernate.annotations.CascadeType;
-import org.hibernate.annotations.Table;
import org.hibernate.annotations.Type;
import org.hibernate.envers.Audited;
import org.springframework.util.Assert;
@Entity
@Audited
@Inheritance(strategy=InheritanceType.SINGLE_TABLE)
-@Table(appliesTo="OriginalSourceBase")
+@Table(name="OriginalSourceBase")
public abstract class OriginalSourceBase<T extends ISourceable>
extends ReferencedEntityBase
implements IOriginalSource<T>, IIntextReferenceTarget,Cloneable {
+
private static final long serialVersionUID = -1972959999261181462L;
@SuppressWarnings("unused")
private static final Logger logger = Logger.getLogger(OriginalSourceBase.class);
public class TimePeriod implements Cloneable, Serializable {
private static final long serialVersionUID = 3405969418194981401L;
private static final Logger logger = Logger.getLogger(TimePeriod.class);
- public static final DateTimeFieldType MONTH_TYPE = DateTimeFieldType.monthOfYear();
public static final DateTimeFieldType YEAR_TYPE = DateTimeFieldType.year();
+ public static final DateTimeFieldType MONTH_TYPE = DateTimeFieldType.monthOfYear();
public static final DateTimeFieldType DAY_TYPE = DateTimeFieldType.dayOfMonth();
public static final DateTimeFieldType HOUR_TYPE = DateTimeFieldType.hourOfDay();
public static final DateTimeFieldType MINUTE_TYPE = DateTimeFieldType.minuteOfHour();
+ public static final Partial CONTINUED = new Partial
+ (new DateTimeFieldType[]{YEAR_TYPE, MONTH_TYPE, DAY_TYPE},
+ new int[]{9999, 11, 30});
+
@XmlElement(name = "Start")
@XmlJavaTypeAdapter(value = PartialAdapter.class)
@Type(type="partialUserType")
@JsonIgnore // currently used for swagger model scanner
private Partial end;
-
@XmlElement(name = "FreeText")
private String freeText;
start=startDate;
}
public TimePeriod(Partial startDate, Partial endDate) {
- start=startDate;
- end=endDate;
+ start = startDate;
+ end = endDate;
}
//******************* GETTER / SETTER ************************************/
@JsonIgnore // currently used for swagger model scanner
public Partial getEnd() {
- return end;
+ return isContinued() ? null : end;
}
public void setEnd(Partial end) {
}
+ /**
+ * Returns the (virtual) continued flag: <code>true</code> if the end
+ * date equals the internal {@link #CONTINUED} marker date which is
+ * used to represent an open-ended period (e.g. "2017+").
+ * @return whether this time period is marked as continued
+ */
+ public boolean isContinued() {
+ return CONTINUED.equals(end);
+ }
+ /**
+ * Sets the (virtual) continued flag.<BR><BR>
+ * NOTE: setting the flag to <code>true</code> replaces an existing
+ * end date by the internal {@link #CONTINUED} marker date; setting it
+ * to <code>false</code> only removes the marker (the end date becomes
+ * <code>null</code>), an ordinary end date is left untouched.
+ * @param isContinued the new continued state
+ */
+ public void setContinued(boolean isContinued) {
+ if (isContinued){
+ this.end = CONTINUED;
+ }else if (isContinued()){
+ this.end = null;
+ }
+ }
+
+
//******************* Transient METHODS ************************************/
/**
}
-
- @Transient
- public String getYear(){
- String result = "";
- if (getStartYear() != null){
- result += String.valueOf(getStartYear());
- if (getEndYear() != null){
- result += "-" + String.valueOf(getEndYear());
- }
- }else{
- if (getEndYear() != null){
- result += String.valueOf(getEndYear());
- }
- }
- return result;
- }
-
@Transient
public Integer getStartYear(){
return getPartialValue(start, YEAR_TYPE);
@Transient
public Integer getEndYear(){
- return getPartialValue(end, YEAR_TYPE);
+ return getPartialValue(getEnd(), YEAR_TYPE);
}
@Transient
public Integer getEndMonth(){
- return getPartialValue(end, MONTH_TYPE);
+ return getPartialValue(getEnd(), MONTH_TYPE);
}
@Transient
public Integer getEndDay(){
- return getPartialValue(end, DAY_TYPE);
+ return getPartialValue(getEnd(), DAY_TYPE);
}
public TimePeriod setStartYear(Integer year){
@Transient
private TimePeriod setEndField(Integer value, DateTimeFieldType type)
throws IndexOutOfBoundsException{
- end = setPartialField(end, value, type);
+ end = setPartialField(getEnd(), value, type);
return this;
}
@Override
public String toString(){
String result = null;
-// DateTimeFormatter formatter = TimePeriodPartialFormatter.NewInstance();
if ( StringUtils.isNotBlank(this.getFreeText())){
result = this.getFreeText();
}else{
/**
* Returns the concatenation of <code>start</code> and <code>end</code>
- *
*/
public String getTimePeriod(){
String result = null;
DateTimeFormatter formatter = TimePeriodPartialFormatter.NewInstance();
String strStart = start != null ? start.toString(formatter): null;
- String strEnd = end != null ? end.toString(formatter): null;
- result = CdmUtils.concat("-", strStart, strEnd);
+ if (isContinued()){
+ result = CdmUtils.concat("", strStart, "+");
+ }else{
+ String strEnd = end != null ? end.toString(formatter): null;
+ result = CdmUtils.concat("-", strStart, strEnd);
+ }
+
+ return result;
+ }
+ @Transient
+ public String getYear(){
+ String result = "";
+ if (getStartYear() != null){
+ result += String.valueOf(getStartYear());
+ if (getEndYear() != null){
+ result += "-" + String.valueOf(getEndYear());
+ }
+ }else{
+ if (getEndYear() != null){
+ result += String.valueOf(getEndYear());
+ }
+ }
+ if (isContinued()){
+ result += "+";
+ }
return result;
}
public int hashCode() {
int hashCode = 7;
hashCode = 29*hashCode +
- (start== null? 33: start.hashCode()) +
- (end== null? 39: end.hashCode()) +
- (freeText== null? 41: freeText.hashCode());
+ (start == null? 33: start.hashCode()) +
+ (end == null? 39: end.hashCode()) +
+ (freeText == null? 41: freeText.hashCode());
return hashCode;
}
target.setFreeText(origin.freeText);
}
-
}
}
+ /**
+ * Creates a new {@link TreeIndex} for the given taxon node.
+ * @param node the taxon node, may be <code>null</code>
+ * @return the new index based on the node's tree index, or
+ * <code>null</code> if the node is <code>null</code>
+ */
+ public static TreeIndex NewInstance(TaxonNode node) {
+ if (node == null){
+ return null;
+ }else{
+ return new TreeIndex(node.treeIndex());
+ }
+ }
+
+
/**
* @param stringList
* @return
return result;
}
+ //regEx, we also allow the tree itself to have a tree index (e.g. #t1#)
+ //this may change in future as not necessarily needed
private static String regEx = "#[a-z](\\d+#)+";
private static Pattern pattern = Pattern.compile(regEx);
}
}
+ /**
+ * Whether this index denotes a root node of a classification, i.e.
+ * a node directly below the tree itself. Root node indexes contain
+ * exactly 3 separator characters (e.g. #t1#2#).
+ */
+ public boolean isTreeRoot(){
+ return countSeparators() == 3;
+ }
+
+ /**
+ * Whether this index denotes the classification (tree) itself.
+ * Tree indexes contain exactly 2 separator characters (e.g. #t1#).
+ */
+ public boolean isTree(){
+ return countSeparators() == 2;
+ }
+
+ //counts the '#' separator characters in this tree index;
+ //shared by isTreeRoot() and isTree() to avoid duplicated loops
+ private int countSeparators(){
+ int count = 0;
+ for (char c : this.treeIndex.toCharArray()){
+ if (c == '#') {
+ count++;
+ }
+ }
+ return count;
+ }
+
// ********************** STATIC METHODS *****************************/
/**
return result;
}
+
+
}
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
+import javax.persistence.Index;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.persistence.OneToMany;
import javax.persistence.OrderBy;
import javax.persistence.OrderColumn;
+import javax.persistence.Table;
import javax.persistence.Transient;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import org.apache.log4j.Logger;
import org.hibernate.annotations.Cascade;
import org.hibernate.annotations.CascadeType;
-import org.hibernate.annotations.Index;
-import org.hibernate.annotations.Table;
import org.hibernate.envers.Audited;
import eu.etaxonomy.cdm.hibernate.HHH_9751_Util;
@XmlRootElement(name = "FeatureNode")
@Entity
@Audited
-@Table(appliesTo="FeatureNode", indexes = { @Index(name = "featureNodeTreeIndex", columnNames = { "treeIndex" }) })
+@Table(name="FeatureNode", indexes = { @Index(name = "featureNodeTreeIndex", columnList = "treeIndex") })
public class FeatureNode extends VersionableEntity
implements ITreeNode<FeatureNode>, Cloneable {
private static final Logger logger = Logger.getLogger(FeatureNode.class);
@Override
public String treeIndex() {
return this.treeIndex;
- }
+ } @Override
+ public String treeIndexLike() {
+ return treeIndex + "%";
+ }
+ @Override
+ public String treeIndexWc() {
+ return treeIndex + "*";
+ }
@Override
@Deprecated
* according to the SDD schema.
*
* @author a.mueller
- * @version 1.0
* @since 08-Jul-2008
*/
@XmlAccessorType(XmlAccessType.FIELD)
* </ul>
*
* @author m.doering
- * @version 1.0
* @since 08-Nov-2007 13:06:57
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlElement(name = "ErrorRadius")
@Field
@NumericField
- private Integer errorRadius = 0;
+ private Integer errorRadius;
@XmlElement(name = "ReferenceSystem")
@XmlIDREF
* E.g. a list of jpg files that represent a scanned article of multiple pages.
*
* @author m.doering
- * @version 1.0
* @since 08-Nov-2007 13:06:34
*/
@XmlAccessorType(XmlAccessType.FIELD)
@Entity
@Audited
@Configurable
-//@Table(appliesTo="Sequence", indexes = { @Index(name = "sequenceTitleCacheIndex", columnNames = { "titleCache" }) })
-public class Sequence extends AnnotatableEntity implements Cloneable{
+//@Table(name="Sequence", indexes = { @Index(name = "sequenceTitleCacheIndex", columnList = "titleCache") })
+public class Sequence
+ extends AnnotatableEntity
+ implements Cloneable{
+
private static final long serialVersionUID = 8298983152731241775L;
private static final Logger logger = Logger.getLogger(Sequence.class);
* One nomenclatural status can be assigned to several taxon names.
*
* @author m.doering
- * @version 1.0
* @since 08-Nov-2007 13:06:39
*/
@XmlAccessorType(XmlAccessType.FIELD)
statusAbbreviation = normalizeStatusAbbrev(statusAbbreviation);
//TODO handle undefined names correctly
- boolean isZooname = name.getNameType().equals(NomenclaturalCode.ICZN);
+ boolean isZooname = name == null? false : name.getNameType().equals(NomenclaturalCode.ICZN);
Map<String, UUID> map = isZooname ? zooAbbrevMap : abbrevMap;
if (map == null ){
import javax.persistence.ManyToMany;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
+import javax.persistence.Table;
import javax.persistence.Transient;
import javax.validation.Valid;
import javax.validation.constraints.Min;
import org.apache.log4j.Logger;
import org.hibernate.annotations.Cascade;
import org.hibernate.annotations.CascadeType;
-import org.hibernate.annotations.Table;
import org.hibernate.annotations.Type;
import org.hibernate.envers.Audited;
import org.hibernate.search.annotations.Analyze;
@Entity
@Audited
@Inheritance(strategy=InheritanceType.SINGLE_TABLE)
-@Table(appliesTo="TaxonName", indexes = {
- @org.hibernate.annotations.Index(name = "taxonNameBaseTitleCacheIndex", columnNames = { "titleCache" }),
- @org.hibernate.annotations.Index(name = "taxonNameBaseNameCacheIndex", columnNames = { "nameCache" }) })
+@Table(name="TaxonName", indexes = {
+ @javax.persistence.Index(name = "taxonNameBaseTitleCacheIndex", columnList = "titleCache"),
+ @javax.persistence.Index(name = "taxonNameBaseNameCacheIndex", columnList = "nameCache") })
@NameMustFollowCode
@CorrectEpithetsForRank(groups = Level2.class)
@NameMustHaveAuthority(groups = Level2.class)
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.ManyToOne;
+import javax.persistence.Table;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import org.apache.log4j.Logger;
import org.hibernate.annotations.Cascade;
import org.hibernate.annotations.CascadeType;
-import org.hibernate.annotations.Table;
import org.hibernate.envers.Audited;
import org.hibernate.search.annotations.Analyze;
import org.hibernate.search.annotations.Field;
//@Indexed(index = "eu.etaxonomy.cdm.model.occurrence.Collection")
@Audited
@Configurable
-@Table(appliesTo="Collection", indexes = { @org.hibernate.annotations.Index(name = "collectionTitleCacheIndex", columnNames = { "titleCache" }) })
-public class Collection extends IdentifiableMediaEntity<IIdentifiableEntityCacheStrategy<Collection>> implements Cloneable{
- private static final long serialVersionUID = -7833674897174732255L;
+@Table(name="Collection", indexes = { @javax.persistence.Index(name = "collectionTitleCacheIndex", columnList = "titleCache") })
+public class Collection
+ extends IdentifiableMediaEntity<IIdentifiableEntityCacheStrategy<Collection>>
+ implements Cloneable{
+
+ private static final long serialVersionUID = -7833674897174732255L;
private static final Logger logger = Logger.getLogger(Collection.class);
@XmlElement(name = "Code")
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
+import javax.persistence.Index;
import javax.persistence.Inheritance;
import javax.persistence.InheritanceType;
import javax.persistence.ManyToMany;
import javax.persistence.ManyToOne;
import javax.persistence.MapKeyJoinColumn;
import javax.persistence.OneToMany;
+import javax.persistence.Table;
import javax.persistence.Transient;
import javax.validation.constraints.NotNull;
import javax.xml.bind.annotation.XmlAccessType;
import org.apache.log4j.Logger;
import org.hibernate.annotations.Cascade;
import org.hibernate.annotations.CascadeType;
-import org.hibernate.annotations.Index;
-import org.hibernate.annotations.Table;
import org.hibernate.annotations.Type;
import org.hibernate.envers.Audited;
import org.hibernate.search.annotations.Analyze;
@Entity
@Audited
@Inheritance(strategy=InheritanceType.SINGLE_TABLE)
-@Table(appliesTo="SpecimenOrObservationBase", indexes = { @Index(name = "specimenOrObservationBaseTitleCacheIndex", columnNames = { "titleCache" }),
- @Index(name = "specimenOrObservationBaseIdentityCacheIndex", columnNames = { "identityCache" }) })
+@Table(name="SpecimenOrObservationBase", indexes = { @Index(name = "specimenOrObservationBaseTitleCacheIndex", columnList = "titleCache"),
+ @Index(name = "specimenOrObservationBaseIdentityCacheIndex", columnList = "identityCache") })
public abstract class SpecimenOrObservationBase<S extends IIdentifiableEntityCacheStrategy<?>>
extends IdentifiableEntity<S>
implements IMultiLanguageTextHolder, IIntextReferenceTarget, IDescribable<DescriptionBase<S>>, IPublishable {
+
private static final long serialVersionUID = 6932680139334408031L;
private static final Logger logger = Logger.getLogger(SpecimenOrObservationBase.class);
import javax.persistence.InheritanceType;
import javax.persistence.Lob;
import javax.persistence.ManyToOne;
+import javax.persistence.Table;
import javax.persistence.Transient;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Pattern;
import org.apache.log4j.Logger;
import org.hibernate.annotations.Cascade;
import org.hibernate.annotations.CascadeType;
-import org.hibernate.annotations.Table;
import org.hibernate.annotations.Type;
import org.hibernate.envers.Audited;
import org.hibernate.search.annotations.Analyze;
import eu.etaxonomy.cdm.model.common.TimePeriod;
import eu.etaxonomy.cdm.model.common.VerbatimTimePeriod;
import eu.etaxonomy.cdm.model.media.IdentifiableMediaEntity;
+import eu.etaxonomy.cdm.model.name.TaxonName;
import eu.etaxonomy.cdm.strategy.cache.reference.DefaultReferenceCacheStrategy;
import eu.etaxonomy.cdm.strategy.cache.reference.INomenclaturalReferenceCacheStrategy;
import eu.etaxonomy.cdm.strategy.match.Match;
@Entity
@Inheritance(strategy=InheritanceType.SINGLE_TABLE)
@Audited
-@Table(appliesTo="Reference", indexes = { @org.hibernate.annotations.Index(name = "ReferenceTitleCacheIndex", columnNames = { "titleCache" }) })
+@Table(name="Reference", indexes = { @javax.persistence.Index(name = "ReferenceTitleCacheIndex", columnList = "titleCache") })
//@InReference(groups=Level3.class)
@ReferenceCheck(groups=Level2.class)
@InReference(groups=Level3.class)
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Collections;
+import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
return count;
}
+ /**
+ * Returns the boolean value indicating whether <i>this</i> taxon is an
+ * invalid designation for at least one other taxon.
+ */
+ // TODO cache as for #hasTaxonomicChildren
+ @Transient
+ public boolean isInvalidDesignation(){
+ return computeInvalidDesignationRelations() > 0;
+ }
+
+ /**
+ * Counts the relationships originating from this taxon whose type is
+ * an invalid designation, i.e. in how many cases this taxon is the
+ * invalid designation for another taxon.
+ * @return the number of invalid designation relationships
+ */
+ private int computeInvalidDesignationRelations(){
+ int result = 0;
+ for (TaxonRelationship relation : getRelationsFromThisTaxon()){
+ boolean isInvalid = relation.getType().isInvalidDesignation();
+ if (isInvalid){
+ result++;
+ }
+ }
+ return result;
+ }
+
/**
* Returns the boolean value indicating whether <i>this</i> taxon is a related
* concept for at least one other taxon.
return result;
}
+ /**
+ * @param comparator
+ * @return
+ *
+ * @see #getSynonymsGroups()
+ */
+ @Transient
+ public List<Taxon> getAllMisappliedNames(){
+ List<Taxon> result = new ArrayList<>();
+
+ for (TaxonRelationship rel : this.getRelationsToThisTaxon()){
+ if (rel.getType().isAnyMisappliedName() ){
+ result.add(rel.getFromTaxon());
+ }
+ }
+ sortBySimpleTitleCacheComparator(result);
+ return result;
+ }
+
+ /**
+ * @param comparator
+ * @return
+ *
+ * @see #getSynonymsGroups()
+ */
+ @Transient
+ public List<Taxon> getInvalidDesignations(){
+ List<Taxon> result = new ArrayList<>();
+ for (TaxonRelationship rel : this.getRelationsToThisTaxon()){
+ if (rel.getType().isInvalidDesignation()){
+ result.add(rel.getFromTaxon());
+ }
+ }
+ sortBySimpleTitleCacheComparator(result);
+ return result;
+ }
+
+ /**
+ * @param comparator
+ * @return
+ *
+ * @see #getSynonymsGroups()
+ */
+ @Transient
+ public List<Taxon> getAllProParteSynonyms(){
+ List<Taxon> result = new ArrayList<>();
+
+ for (TaxonRelationship rel : this.getRelationsToThisTaxon()){
+ if (rel.getType().isAnySynonym()){
+ result.add(rel.getFromTaxon());
+ }
+ }
+ sortBySimpleTitleCacheComparator(result);
+ return result;
+ }
+ /**
+ * @param result
+ */
+ /**
+ * Sorts the given taxa in place by their titleCache using natural
+ * string order, with <code>null</code> caches ordered first.
+ * @param result the list of taxa to sort, must not be <code>null</code>
+ */
+ private void sortBySimpleTitleCacheComparator(List<Taxon> result) {
+
+ Comparator<Taxon> taxonComparator = new Comparator<Taxon>(){
+
+ @Override
+ public int compare(Taxon taxon1, Taxon taxon2) {
+ //call getters once; compare by equality, not reference
+ //identity (the original == check on Strings was only
+ //incidentally correct for the both-null case)
+ String cache1 = taxon1.getTitleCache();
+ String cache2 = taxon2.getTitleCache();
+ if (cache1 == null){
+ return cache2 == null ? 0 : -1;
+ }
+ if (cache2 == null){
+ return 1;
+ }
+ return cache1.compareTo(cache2);
+ }
+ };
+ Collections.sort(result, taxonComparator);
+ }
+
/**
* Returns the image gallery description. If no image gallery exists, a new one is created using the
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
+import javax.persistence.Index;
import javax.persistence.ManyToOne;
+import javax.persistence.Table;
import javax.persistence.Transient;
import javax.validation.constraints.NotNull;
import javax.xml.bind.annotation.XmlAccessType;
import org.apache.log4j.Logger;
import org.hibernate.annotations.Cascade;
import org.hibernate.annotations.CascadeType;
-import org.hibernate.annotations.Index;
-import org.hibernate.annotations.Table;
import org.hibernate.envers.Audited;
import org.hibernate.search.annotations.Analyze;
import org.hibernate.search.annotations.ClassBridge;
@Entity
@Audited
//@PreFilter("hasPermission(filterObject, 'edit')")
-@Table(appliesTo="TaxonBase", indexes = { @Index(name = "taxonBaseTitleCacheIndex", columnNames = { "titleCache" }) })
+@Table(name="TaxonBase", indexes = { @Index(name = "taxonBaseTitleCacheIndex", columnList = "titleCache") })
@TaxonNameCannotBeAcceptedAndSynonym(groups = Level3.class)
@ClassBridges({
@ClassBridge(name="classInfo",
index = org.hibernate.search.annotations.Index.YES,
store = Store.YES,
impl = ClassInfoBridge.class),
- @ClassBridge(name="accTaxon", // TODO rename to acceptedTaxon, since we are usually not using abbreviations for field names, see also ACC_TAXON_BRIDGE_PREFIX
+ @ClassBridge(name=AcceptedTaxonBridge.ACC_TAXON, // TODO rename to acceptedTaxon, since we are usually not using abbreviations for field names, see also ACC_TAXON_BRIDGE_PREFIX
index = org.hibernate.search.annotations.Index.YES,
store = Store.YES,
impl = AcceptedTaxonBridge.class),
import javax.persistence.OneToMany;
import javax.persistence.OrderBy;
import javax.persistence.OrderColumn;
+import javax.persistence.Table;
import javax.persistence.Transient;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import org.hibernate.LazyInitializationException;
import org.hibernate.annotations.Cascade;
import org.hibernate.annotations.CascadeType;
-import org.hibernate.annotations.Index;
-import org.hibernate.annotations.Table;
import org.hibernate.envers.Audited;
+import org.hibernate.search.annotations.Analyze;
import org.hibernate.search.annotations.ContainedIn;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Index;
import org.hibernate.search.annotations.IndexedEmbedded;
+import org.hibernate.search.annotations.Store;
import eu.etaxonomy.cdm.hibernate.HHH_9751_Util;
import eu.etaxonomy.cdm.hibernate.HibernateProxyHelper;
//@Indexed disabled to reduce clutter in indexes, since this type is not used by any search
//@Indexed(index = "eu.etaxonomy.cdm.model.taxon.TaxonNode")
@Audited
-@Table(appliesTo="TaxonNode", indexes = { @Index(name = "taxonNodeTreeIndex", columnNames = { "treeIndex" }) })
+@Table(name="TaxonNode", indexes = { @javax.persistence.Index(name = "taxonNodeTreeIndex", columnList = "treeIndex") })
@ChildTaxaMustBeLowerRankThanParent(groups = Level3.class)
@ChildTaxaMustNotSkipRanks(groups = Level3.class)
@ChildTaxaMustDeriveNameFromParent(groups = Level3.class)
-public class TaxonNode extends AnnotatableEntity implements ITaxonTreeNode, ITreeNode<TaxonNode>, Cloneable{
+public class TaxonNode
+ extends AnnotatableEntity
+ implements ITaxonTreeNode, ITreeNode<TaxonNode>, Cloneable{
+
private static final long serialVersionUID = -4743289894926587693L;
private static final Logger logger = Logger.getLogger(TaxonNode.class);
@XmlElement(name = "treeIndex")
@Column(length=255)
+ @Field(store = Store.YES, index = Index.YES, analyze = Analyze.NO)
private String treeIndex;
public void setTreeIndex(String treeIndex) {
this.treeIndex = treeIndex;
}
+ @Override
+ public String treeIndexLike() {
+ return treeIndex + "%";
+ }
+ @Override
+ public String treeIndexWc() {
+ return treeIndex + "*";
+ }
}
/**
- * Whether this TaxonNode is a descendant of the given TaxonNode
- *
- * Caution: use this method with care on big branches. -> performance and memory hungry
- *
- * Protip: Try solving your problem with the isAscendant method which traverses the tree in the
- * other direction (up). It will always result in a rather small set of consecutive parents beeing
- * generated.
- *
- * TODO implement more efficiently without generating the set of descendants first
+ * Whether this TaxonNode is a descendant of (or equal to) the given TaxonNode
*
* @param possibleParent
- * @return true if this is a descendant
+ * @return <code>true</code> if <b>this</b> is a descendant
*/
@Transient
public boolean isDescendant(TaxonNode possibleParent){
- if (this.treeIndex() == null || possibleParent.treeIndex() == null) {
+ if (possibleParent == null || this.treeIndex() == null
+ || possibleParent.treeIndex() == null) {
return false;
}
- return possibleParent == null ? false : this.treeIndex().startsWith(possibleParent.treeIndex() );
+ return this.treeIndex().startsWith(possibleParent.treeIndex() );
}
/**
- * Whether this TaxonNode is an ascendant of the given TaxonNode
+ * Whether this TaxonNode is an ascendant of (or equal to) the given TaxonNode.
+ *
*
* @param possibleChild
- * @return true if there are ascendants
+ * @return <code>true</code> if <b>this</b> is an ancestor of the given child parameter
*/
@Transient
public boolean isAncestor(TaxonNode possibleChild){
- if (this.treeIndex() == null || possibleChild.treeIndex() == null) {
+ if (possibleChild == null || this.treeIndex() == null || possibleChild.treeIndex() == null) {
return false;
}
// return possibleChild == null ? false : possibleChild.getAncestors().contains(this);
- return possibleChild == null ? false : possibleChild.treeIndex().startsWith(this.treeIndex());
+ return possibleChild.treeIndex().startsWith(this.treeIndex());
}
/**
return childNodes.size() > 0;
}
-
public boolean hasTaxon() {
return (taxon!= null);
}
public boolean isMisappliedNameOrInvalidDesignation(){
if (this.isAnyMisappliedName()){
return true;
- }else if (this.equals(INVALID_DESIGNATION_FOR())){
+ }else if (isInvalidDesignation()){
+ return true;
+ }
+ return false;
+ }
+
+ /**
+ * <code>true</code> if this relationship type is an
+ * {@link #INVALID_DESIGNATION_FOR() invalid designation}
+ *
+ * @see #isAnyMisappliedName()()
+ */
+ public boolean isInvalidDesignation(){
+ if (this.equals(INVALID_DESIGNATION_FOR())){
return true;
}
return false;
*/
package eu.etaxonomy.cdm.strategy.cache.reference;
+import java.util.List;
import java.util.UUID;
import org.apache.commons.lang.StringUtils;
import org.joda.time.format.DateTimeFormatter;
import eu.etaxonomy.cdm.common.CdmUtils;
+import eu.etaxonomy.cdm.hibernate.HibernateProxyHelper;
+import eu.etaxonomy.cdm.model.agent.Person;
+import eu.etaxonomy.cdm.model.agent.Team;
import eu.etaxonomy.cdm.model.agent.TeamOrPersonBase;
import eu.etaxonomy.cdm.model.common.CdmBase;
import eu.etaxonomy.cdm.model.common.VerbatimTimePeriod;
return stringBuilder.toString();
}
+ /**
+ * Creates a short citation for the given reference, consisting of the
+ * author family name(s) followed by the publication year in
+ * parentheses, e.g. "Miller & Smith (1972)". For teams, at most the
+ * first 2 members are listed; further members are abbreviated as
+ * " & al.". Falls back to the titleCache where no family name exists.
+ * @param reference the reference to cite
+ * @return the short citation, or <code>null</code> if the reference
+ * has no authorship
+ */
+ public String createShortCitation(Reference reference) {
+ TeamOrPersonBase<?> authorship = reference.getAuthorship();
+ String shortCitation = "";
+ if (authorship == null) {
+ return null;
+ }
+ authorship = HibernateProxyHelper.deproxy(authorship);
+ if (authorship instanceof Person){
+ //single person: family name, falling back to titleCache
+ shortCitation = ((Person)authorship).getFamilyName();
+ if (StringUtils.isBlank(shortCitation) ){
+ shortCitation = ((Person)authorship).getTitleCache();
+ }
+ }
+ else if (authorship instanceof Team){
+
+ Team authorTeam = HibernateProxyHelper.deproxy(authorship, Team.class);
+ int index = 0;
+
+ //list at most 2 members, abbreviate the rest as " & al."
+ for (Person teamMember : authorTeam.getTeamMembers()){
+ index++;
+ if (index == 3){
+ shortCitation += " & al.";
+ break;
+ }
+ String concat = concatString(authorTeam, authorTeam.getTeamMembers(), index, ", ", " & ");
+ if (teamMember.getFamilyName() != null){
+ shortCitation += concat + teamMember.getFamilyName();
+ }else{
+ shortCitation += concat + teamMember.getTitleCache();
+ }
+
+ }
+ //team without (usable) members: fall back to team titleCache
+ if (StringUtils.isBlank(shortCitation)){
+ shortCitation = authorTeam.getTitleCache();
+ }
+
+ }
+ //append year; free text wins over the structured year
+ if (reference.getDatePublished() != null) {
+ if (!StringUtils.isBlank(reference.getDatePublished().getFreeText())){
+ shortCitation = shortCitation + " (" + reference.getDatePublished().getFreeText() + ")";
+ }else if (!StringUtils.isBlank(reference.getYear()) ){
+ shortCitation = shortCitation + " (" + reference.getYear() + ")";
+ }
+ }
+
+ return shortCitation;
+ }
+
+ /**
+ * Chooses the separator to prepend before team member number i:
+ * nothing before the first member, the standard separator before
+ * middle members (and before the last listed member if the team has
+ * more unlisted members), and the final separator otherwise.
+ */
+ private static String concatString(Team team, List<Person> teamMembers, int i, String stdTeamConcatenation, String finalTeamConcatenation) {
+ if (i <= 1){
+ return "";
+ }
+ boolean useStdSeparator = i < teamMembers.size()
+ || (team.isHasMoreMembers() && i == teamMembers.size());
+ return useStdSeparator ? stdTeamConcatenation : finalTeamConcatenation;
+ }
+
+
@Override
public String getNomenclaturalCache(Reference reference) {
return this.getNomenclaturalCitation(reference, null);
import eu.etaxonomy.cdm.hibernate.HibernateProxyHelper;
import eu.etaxonomy.cdm.model.agent.Person;
import eu.etaxonomy.cdm.model.agent.Team;
+import eu.etaxonomy.cdm.model.common.CdmBase;
import eu.etaxonomy.cdm.model.name.TaxonName;
import eu.etaxonomy.cdm.model.reference.Reference;
import eu.etaxonomy.cdm.model.taxon.Synonym;
if (sec.getAuthorship() != null){
if (sec.getAuthorship().isInstanceOf(Team.class)){
- Team authorTeam = HibernateProxyHelper.deproxy(sec.getAuthorship(), Team.class);
+ Team authorTeam = CdmBase.deproxy(sec.getAuthorship(), Team.class);
if (authorTeam.getTeamMembers().size() > 2){
if (authorTeam.getTeamMembers().get(0).getFamilyName() != null){
result = authorTeam.getTeamMembers().get(0).getFamilyName() + " & al.";
//case fl. 1806 or c. 1806 or fl. 1806?
private static final Pattern prefixedYearPattern = Pattern.compile("(fl|c)\\.\\s*\\d{4}(\\s*-\\s*\\d{4})?\\??");
//standard
- private static final Pattern standardPattern = Pattern.compile("\\s*\\d{2,4}(\\s*-(\\s*\\d{2,4})?)?");
+ private static final Pattern standardPattern = Pattern.compile("\\s*\\d{2,4}(\\s*-(\\s*\\d{2,4})?|\\+)?");
private static final String strDotDate = "[0-3]?\\d\\.[01]?\\d\\.\\d{4,4}";
- private static final String strDotDatePeriodPattern = String.format("%s(\\s*-\\s*%s?)?", strDotDate, strDotDate);
+ private static final String strDotDatePeriodPattern = String.format("%s(\\s*-\\s*%s|\\+)?", strDotDate, strDotDate);
private static final Pattern dotDatePattern = Pattern.compile(strDotDatePeriodPattern);
private static final String strSlashDate = "[0-3]?\\d\\/[01]?\\d\\/\\d{4,4}";
- private static final String strSlashDatePeriodPattern = String.format("%s(\\s*-\\s*%s?)?", strSlashDate, strSlashDate);
+ private static final String strSlashDatePeriodPattern = String.format("%s(\\s*-\\s*%s|\\+)?", strSlashDate, strSlashDate);
private static final Pattern slashDatePattern = Pattern.compile(strSlashDatePeriodPattern);
private static final Pattern lifeSpanPattern = Pattern.compile(String.format("%s--%s", firstYearPattern, firstYearPattern));
private static final String strMonthes = "((Jan|Feb|Aug|Sept?|Oct(ober)?|Nov|Dec)\\.?|(Mar(ch)?|Apr(il)?|May|June?|July?))";
- private static final String strDateWithMonthes = "([0-3]?\\d" + dotOrWs + ")?" + strMonthes + dotOrWs + "\\d{4,4}";
+ private static final String strDateWithMonthes = "([0-3]?\\d" + dotOrWs + ")?" + strMonthes + dotOrWs + "\\d{4,4}\\+?";
private static final Pattern dateWithMonthNamePattern = Pattern.compile(strDateWithMonthes);
public static <T extends TimePeriod> T parseString(T timePeriod, String periodString){
result.setFreeText(null);
- //case "1806"[1807];
+ //case "1806"[1807]; => TODO this should (and is?) handled in parse verbatim, should be removed here
if (uncorrectYearPatter.matcher(periodString).matches()){
result.setFreeText(periodString);
String realYear = periodString.split("\\[")[1];
realYear = realYear.replace("]", "");
result.setStartYear(Integer.valueOf(realYear));
result.setFreeText(periodString);
- //case fl. 1806 or c. 1806 or fl. 1806?
+ //case fl. 1806 or c. 1806 or fl. 1806? => TODO questionable if this should really be handled here; "fl." probably stands for flowering and is not part of the date but of the date context. What does "c." stand for? Used by Markup import?
}else if(prefixedYearPattern.matcher(periodString).matches()){
result.setFreeText(periodString);
Matcher yearMatcher = firstYearPattern.matcher(periodString);
result.setFreeText(periodString);
}else {
try {
+ dtEnd = handleContinued(dates, dtEnd);
//start
- if (! StringUtils.isBlank(dates[0])){
+ if (isNotBlank(dates[0])){
dtStart = parseSingleSlashDate(dates[0].trim());
}
//end
- if (dates.length >= 2 && ! StringUtils.isBlank(dates[1])){
+ if (dates.length >= 2 && isNotBlank(dates[1])){
dtEnd = parseSingleSlashDate(dates[1].trim());
}
result.setFreeText(periodString);
}
}
-
}
result.setFreeText(periodString);
}else {
try {
+ dtEnd = handleContinued(dates, dtEnd);
//start
if (! StringUtils.isBlank(dates[0])){
- dtStart = parseSingleDotDate(dates[0].trim());
+ dtStart = parseSingleDotDate(dates[0].trim());
}
//end
}
}
+ /**
+ * Checks if dates represents a "continued" period, i.e. a single date
+ * part ending with "+" (e.g. "2017+"). If so, the trailing "+" is
+ * stripped from dates[0] <b>in place</b> and
+ * {@link TimePeriod#CONTINUED} is returned as the new end date;
+ * otherwise the given dtEnd is returned unchanged.
+ * @param dates the split date parts; dates[0] may be modified in place
+ * @param dtEnd the end date parsed so far
+ * @return {@link TimePeriod#CONTINUED} for continued periods,
+ * otherwise the given dtEnd
+ */
+ protected static Partial handleContinued(String[] dates, Partial dtEnd) {
+ if (dates.length == 1 && dates[0].endsWith("+") && dates[0].length()>1){
+ dates[0] = dates[0].substring(0, dates[0].length()-1).trim();
+ dtEnd = TimePeriod.CONTINUED;
+ }
+ return dtEnd;
+ }
+
/**
* @param dateString
private static void parseDateWithMonthName(String dateString, TimePeriod result) {
String[] dates = dateString.split("(\\.|\\s+)+");
-
-
if (dates.length > 3 || dates.length < 2){
logger.warn("Not 2 or 3 date parts in date string: " + dateString);
result.setFreeText(dateString);
String strMonth = hasNoDay? dates[0] : dates[1];
String strDay = hasNoDay? null : dates[0];
try {
+ if (strYear.endsWith("+")){
+ strYear = strYear.substring(0, strYear.length()-1).trim();
+ result.setContinued(true);
+ }
Integer year = Integer.valueOf(strYear.trim());
Integer month = monthNrFormName(strMonth.trim());
Integer day = strDay == null ? null : Integer.valueOf(strDay.trim());
}
-
+ //TODO "continued" not yet handled, probably looks different here (e.g. 2017--x)
private static void parseLifeSpanPattern(String periodString, TimePeriod result) {
try{
logger.warn("More than 1 '-' in period String: " + periodString);
}else {
try {
+ dtEnd = handleContinued(years, dtEnd);
//start
if (! StringUtils.isBlank(years[0])){
dtStart = parseSingleDate(years[0].trim());
* @return the parsed period
*/
private static TimePeriod parseEnglishDate(String strFrom, String strTo, boolean isAmerican) {
- Partial dateFrom = parseSingleEnglishDate(strFrom, isAmerican);
Partial dateTo = parseSingleEnglishDate(strTo, isAmerican);
+ if (strFrom.endsWith("+") && dateTo == null){
+ dateTo = TimePeriod.CONTINUED;
+ strFrom = strFrom.substring(0, strFrom.length()-1).trim();
+ }
+
+ Partial dateFrom = parseSingleEnglishDate(strFrom, isAmerican);
TimePeriod result = TimePeriod.NewInstance(dateFrom, dateTo);
return result;
}
return result;
}
+
+ private static boolean isBlank(String str){
+ return StringUtils.isBlank(str);
+ }
+ private static boolean isNotBlank(String str){
+ return StringUtils.isNotBlank(str);
+ }
+
}
import eu.etaxonomy.cdm.format.taxon.TaxonRelationshipFormatter;
import eu.etaxonomy.cdm.model.agent.Person;
+import eu.etaxonomy.cdm.model.agent.Team;
import eu.etaxonomy.cdm.model.common.DefaultTermInitializer;
import eu.etaxonomy.cdm.model.common.Language;
import eu.etaxonomy.cdm.model.common.VerbatimTimePeriod;
*/
public class TaxonRelationshipFormatterTest {
+ private static boolean WITHOUT_NAME = true;
+
private TaxonRelationship taxonRel;
private Reference relSec;
private TaxonRelationshipFormatter formatter;
private boolean reverse;
- Person toNameAuthor;
+ private Person toNameAuthor;
+ private Person macFarlane;
+ private Person cheek;
+ private Person toSecAuthor;
private List<Language> languages;
fromSec = ReferenceFactory.newGeneric();
fromSec.setTitle("From Sec");
String initials = "J.M.";
- fromSec.setAuthorship(Person.NewInstance(null, "Macfarlane", initials, null));
+ macFarlane = Person.NewInstance(null, "Macfarlane", initials, null);
+ fromSec.setAuthorship(macFarlane);
fromSec.setDatePublished(VerbatimTimePeriod.NewVerbatimInstance(1918));
relSec = ReferenceFactory.newGeneric();
relSec.setTitle("From rel reference");
initials = null; //"M.R.";
- relSec.setAuthorship(Person.NewInstance(null, "Cheek", initials, null));
+ cheek = Person.NewInstance(null, "Cheek", initials, null);
+ relSec.setAuthorship(cheek);
relSec.setDatePublished(VerbatimTimePeriod.NewVerbatimInstance(1919));
toSec = ReferenceFactory.newGeneric();
toSec.setTitle("To Sec");
- toSec.setAuthorship(Person.NewTitledInstance("ToSecAuthor"));
+ toSecAuthor = Person.NewTitledInstance("ToSecAuthor");
+ toSec.setAuthorship(toSecAuthor);
toSec.setDatePublished(VerbatimTimePeriod.NewVerbatimInstance(1928));
fromTaxon = Taxon.NewInstance(fromName, fromSec);
tags = formatter.getTaggedText(taxonRel, reverse, languages);
str = TaggedCacheHelper.createString(tags);
System.out.println(str);
- Assert.assertEquals(inverseSymbol + " ?\"Abies alba\" auct., err. sec. Cheek 1919: 123", str);
+ Assert.assertEquals(inverseSymbol + " ?\u202F\"Abies alba\" auct., err. sec. Cheek 1919: 123", str);
}
toTaxon.setAppendedPhrase("");
tags = formatter.getTaggedText(taxonRel, reverse, languages);
str = TaggedCacheHelper.createString(tags);
- Assert.assertEquals(SYMBOL + " ?Pinus pinova Mill. sec. ???, rel. sec. Cheek 1919: 123", str);
+ Assert.assertEquals("?" + SYMBOL + " Pinus pinova Mill. sec. ???, rel. sec. Cheek 1919: 123", str);
}
}
+ @Test
+ public void testGetFamilyNames() {
+
+ //Test start condition with single person
+ List<TaggedText> tags = formatter.getTaggedText(taxonRel, reverse, languages);
+ String str = TaggedCacheHelper.createString(tags);
+ Assert.assertFalse("Formatted text should not contain the team correctly formatted", str.contains("Macfarlane & Cheek"));
+
+ //use team
+ Team secRelTeam = Team.NewInstance();
+ secRelTeam.addTeamMember(macFarlane);
+ secRelTeam.addTeamMember(cheek);
+ relSec.setAuthorship(secRelTeam);
+
+ tags = formatter.getTaggedText(taxonRel, reverse, languages);
+ str = TaggedCacheHelper.createString(tags);
+ System.out.println(str);
+ Assert.assertTrue(str.contains("rel. sec. Macfarlane & Cheek 1919"));
+
+ //add third member
+ secRelTeam.addTeamMember(toSecAuthor);
+ tags = formatter.getTaggedText(taxonRel, reverse, languages);
+ str = TaggedCacheHelper.createString(tags);
+ System.out.println(str);
+ Assert.assertTrue(str.contains("rel. sec. Macfarlane, Cheek & ToSecAuthor 1919"));
+
+ //add et al.
+ secRelTeam.setHasMoreMembers(true);
+ tags = formatter.getTaggedText(taxonRel, reverse, languages);
+ str = TaggedCacheHelper.createString(tags);
+ System.out.println(str);
+ Assert.assertTrue(str.contains("rel. sec. Macfarlane, Cheek, ToSecAuthor & al. 1919"));
+
+ }
+
+ @Test
+ public void testGetTaggedTextMisappliedNameWithoutName() {
+
+ reverse = true;
+ String inverseSymbol = TaxonRelationshipType.MISAPPLIED_NAME_FOR().getInverseSymbol();
+ String symbol = TaxonRelationshipType.MISAPPLIED_NAME_FOR().getSymbol();
+
+ List<TaggedText> tags = formatter.getTaggedText(taxonRel, reverse, languages, WITHOUT_NAME);
+ String str = TaggedCacheHelper.createString(tags);
+ Assert.assertEquals(inverseSymbol + " sensu Macfarlane 1918, err. sec. Cheek 1919: 123", str);
+
+ //reverse
+ tags = formatter.getTaggedText(taxonRel, !reverse, languages, WITHOUT_NAME);
+ str = TaggedCacheHelper.createString(tags);
+ Assert.assertEquals(symbol + " sec. ToSecAuthor 1928, rel. sec. Cheek 1919: 123", str);
+
+ //auctores
+ fromTaxon.setAppendedPhrase("auctores");
+ tags = formatter.getTaggedText(taxonRel, reverse, languages, WITHOUT_NAME);
+ str = TaggedCacheHelper.createString(tags);
+ Assert.assertEquals(inverseSymbol + " auctores sensu Macfarlane 1918, err. sec. Cheek 1919: 123", str);
+
+ fromTaxon.setSec(null);
+ fromTaxon.setAppendedPhrase("");
+ tags = formatter.getTaggedText(taxonRel, reverse, languages, WITHOUT_NAME);
+ str = TaggedCacheHelper.createString(tags);
+ Assert.assertEquals(inverseSymbol + " auct., err. sec. Cheek 1919: 123", str);
+
+ fromTaxon.setDoubtful(true);
+ tags = formatter.getTaggedText(taxonRel, reverse, languages, WITHOUT_NAME);
+ str = TaggedCacheHelper.createString(tags);
+ System.out.println(str);
+ Assert.assertEquals(inverseSymbol + " ?\u202F auct., err. sec. Cheek 1919: 123", str);
+
+ }
+
+ @Test
+ public void testGetTaggedTextConceptRelationsWithoutName() {
+
+ reverse = false;
+
+ TaxonRelationshipType relType = TaxonRelationshipType.INCLUDES();
+
+ final String SYMBOL = relType.getSymbol();
+
+ taxonRel.setType(relType);
+ List<TaggedText> tags = formatter.getTaggedText(taxonRel, reverse, languages, WITHOUT_NAME);
+ String str = TaggedCacheHelper.createString(tags);
+ Assert.assertEquals(SYMBOL + " sec. ToSecAuthor 1928, rel. sec. Cheek 1919: 123", str);
+
+ tags = formatter.getTaggedText(taxonRel, !reverse, languages, WITHOUT_NAME);
+ str = TaggedCacheHelper.createString(tags);
+ Assert.assertEquals(relType.getInverseSymbol() + " sec. Macfarlane 1918, rel. sec. Cheek 1919: 123", str);
+
+ toTaxon.setAppendedPhrase("sensu stricto");
+ tags = formatter.getTaggedText(taxonRel, reverse, languages, WITHOUT_NAME);
+ str = TaggedCacheHelper.createString(tags);
+ Assert.assertEquals(SYMBOL + " sensu stricto sec. ToSecAuthor 1928, rel. sec. Cheek 1919: 123", str);
+
+ toTaxon.setSec(null);
+ toTaxon.setAppendedPhrase("");
+ tags = formatter.getTaggedText(taxonRel, reverse, languages, WITHOUT_NAME);
+ str = TaggedCacheHelper.createString(tags);
+ Assert.assertEquals(SYMBOL + " sec. ???, rel. sec. Cheek 1919: 123", str);
+
+ taxonRel.setDoubtful(true);
+ toTaxon.setAppendedPhrase("");
+ tags = formatter.getTaggedText(taxonRel, reverse, languages, WITHOUT_NAME);
+ str = TaggedCacheHelper.createString(tags);
+ Assert.assertEquals("?" + SYMBOL + " sec. ???, rel. sec. Cheek 1919: 123", str);
+
+ }
+
}
Assert.assertEquals("Year should be 1999", "1999", tp.getYear());\r
tp.setEndYear(2002);\r
Assert.assertEquals("Year should be 1999-2002", "1999-2002", tp.getYear());\r
+ tp.setContinued(true);\r
+ Assert.assertEquals("Year should be 1999+", "1999+", tp.getYear());\r
}\r
\r
\r
Assert.assertEquals("3.xx.1788-1799", tp1.toString());\r
tp1.setEndMonth(11);\r
Assert.assertEquals("3.xx.1788-11.1799", tp1.toString());\r
+ tp1.setContinued(true);\r
+ Assert.assertEquals("3.xx.1788+", tp1.toString());\r
+\r
+ tp1 = TimePeriod.NewInstance(1788,1799);\r
+ tp1.setContinued(true);\r
+ Assert.assertEquals("1788+", tp1.toString());\r
+ tp1 = TimePeriod.NewInstance((Integer)null);\r
+ tp1.setContinued(true);\r
+ //this is still undefined, could be something like 'xxxx+' in future\r
+ Assert.assertEquals("+", tp1.toString());\r
}\r
\r
+ @Test\r
+ public void testContinued() {\r
+ TimePeriod tp1 = TimePeriod.NewInstance(2017, 2018);\r
+ Assert.assertEquals((Integer)2018, tp1.getEndYear());\r
+ tp1.setContinued(true);\r
+ Assert.assertNull("The end should be removed and also the CONTINUED constant should be returned for getEnd()", tp1.getEnd());\r
+ Assert.assertTrue(tp1.isContinued());\r
+ Assert.assertEquals(null, tp1.getEndYear());\r
+ Assert.assertEquals(null, tp1.getEndMonth());\r
+ Assert.assertEquals(null, tp1.getEndDay());\r
+\r
+ //set continued to false (will not recover old end value)\r
+ tp1.setContinued(false);\r
+ Assert.assertFalse(tp1.isContinued());\r
+ Assert.assertEquals(null, tp1.getEndYear());\r
+ Assert.assertEquals(null, tp1.getEndMonth());\r
+ Assert.assertEquals(null, tp1.getEndDay());\r
+\r
+ //replace continued by end\r
+ tp1 = TimePeriod.NewInstance(2017, 2018);\r
+ tp1.setContinued(true);\r
+ Assert.assertTrue(tp1.isContinued());\r
+ tp1.setEndMonth(month);\r
+ Assert.assertFalse(tp1.isContinued());\r
+ Assert.assertEquals(null, tp1.getEndYear());\r
+ Assert.assertEquals(month, tp1.getEndMonth());\r
+ Assert.assertEquals(null, tp1.getEndDay());\r
+\r
+ }\r
+\r
+\r
\r
/**\r
* Test method for {@link eu.etaxonomy.cdm.model.common.TimePeriod#clone()}.\r
--- /dev/null
+/**
+* Copyright (C) 2018 EDIT
+* European Distributed Institute of Taxonomy
+* http://www.e-taxonomy.eu
+*
+* The contents of this file are subject to the Mozilla Public License Version 1.1
+* See LICENSE.TXT at the top of this package for the full license terms.
+*/
+package eu.etaxonomy.cdm.model.common;
+
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * @author a.mueller
+ * @since 12.09.2018
+ *
+ */
+public class TreeIndexTest {
+
+ TreeIndex indexRoot;
+ TreeIndex indexTree;
+
+ @Before
+ public void setUp() throws Exception {
+ indexRoot = TreeIndex.NewInstance("#t1#222#");
+ //maybe not allowed in future, see comment on TreeIndex.regEx
+ indexTree = TreeIndex.NewInstance("#t1#");
+ }
+
+
+ @Test
+ public void testParse() {
+ try {
+ indexTree = TreeIndex.NewInstance("#t1#11");
+ Assert.fail("Index string must end with #");
+ } catch (Exception e) {}
+ try {
+ indexTree = TreeIndex.NewInstance("t1#11#");
+ Assert.fail("Index string must start with #");
+ } catch (Exception e) {}
+
+ try {
+ indexTree = TreeIndex.NewInstance("#1#11#");
+ Assert.fail("Index must start with tree identifier which starts with a single character a-z");
+ } catch (Exception e) {}
+
+ try {
+ indexTree = TreeIndex.NewInstance("#tt1#11#");
+ Assert.fail("Tree identifier must have only 1 character a-z");
+ } catch (Exception e) {}
+
+ try {
+ indexTree = TreeIndex.NewInstance("#t1#t11#");
+ Assert.fail("Node identifier must have no character a-z");
+ } catch (Exception e) {}
+
+ }
+
+ @Test
+ public void testIsTreeRoot() {
+ Assert.assertTrue("Index should be tree root", indexRoot.isTreeRoot());
+ Assert.assertFalse("Index should not be tree", indexRoot.isTree());
+ }
+
+ @Test
+ public void testIsTree() {
+ Assert.assertFalse("Index should not be tree root", indexTree.isTreeRoot());
+ Assert.assertTrue("Index should be tree", indexTree.isTree());
+ }
+
+}
import org.apache.log4j.Logger;
import org.joda.time.DateTimeFieldType;
import org.joda.time.Partial;
-import org.junit.After;
-import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
-import org.junit.BeforeClass;
import org.junit.Test;
import eu.etaxonomy.cdm.common.UTF8;
// private TimePeriod noStartAndEndYear;
- /**
- * @throws java.lang.Exception
- */
- @BeforeClass
- public static void setUpBeforeClass() throws Exception {
- }
-
- /**
- * @throws java.lang.Exception
- */
- @AfterClass
- public static void tearDownAfterClass() throws Exception {
- }
-
/**
* @throws java.lang.Exception
*/
// noStartAndEndYear = TimePeriod.NewInstance(start, end);
}
- /**
- * @throws java.lang.Exception
- */
- @After
- public void tearDown() throws Exception {
- }
-
//************************ TESTS ******************************************
Assert.assertEquals(Integer.valueOf(1), tp.getEndMonth());
Assert.assertEquals(Integer.valueOf(2), tp.getEndDay());
+ }
+
+ @Test
+ public void testSlashPattern() {
+
String strSlashDate = "31/12/2015 - 2/1/2016";
- tp = TimePeriodParser.parseString(strSlashDate);
+ TimePeriod tp = TimePeriodParser.parseString(strSlashDate);
assertNotNull(tp);
Assert.assertEquals("31.12.2015-2.1.2016", tp.toString());
Assert.assertEquals("2015-2016", tp.getYear());
Assert.assertEquals(null, tp.getVerbatimDate());
}
+ @Test
+ public void testParseContinued() {
+ String strDate = "01.12.1957+";
+ TimePeriod tp = TimePeriodParser.parseString(strDate);
+ Assert.assertTrue(tp.isContinued());
+ Assert.assertEquals("1.12.1957+", tp.toString());
+ Assert.assertEquals(Integer.valueOf(1957), tp.getStartYear());
+ Assert.assertEquals(Integer.valueOf(12), tp.getStartMonth());
+ Assert.assertEquals(Integer.valueOf(1), tp.getStartDay());
+ Assert.assertNull(tp.getEnd());
+
+ strDate = "1957+";
+ tp = TimePeriodParser.parseString(strDate);
+ Assert.assertTrue(tp.isContinued());
+ Assert.assertEquals("1957+", tp.toString());
+ Assert.assertEquals(Integer.valueOf(1957), tp.getStartYear());
+ Assert.assertNull(tp.getStartMonth());
+ Assert.assertNull(tp.getStartDay());
+ Assert.assertNull(tp.getEnd());
+
+ strDate = "24 Aug. 1957+";
+ tp = TimePeriodParser.parseString(strDate);
+ Assert.assertEquals("24.8.1957+", tp.toString());
+ Assert.assertTrue(tp.isContinued());
+ Assert.assertEquals("1957+", tp.getYear());
+ Assert.assertEquals(Integer.valueOf(1957), tp.getStartYear());
+ Assert.assertEquals(Integer.valueOf(8), tp.getStartMonth());
+ Assert.assertEquals(Integer.valueOf(24), tp.getStartDay());
+
+ String strSlashDate = "31/12/2015+";
+ tp = TimePeriodParser.parseString(strSlashDate);
+ Assert.assertEquals("31.12.2015+", tp.toString());
+ Assert.assertTrue(tp.isContinued());
+ Assert.assertEquals("2015+", tp.getYear());
+ Assert.assertEquals(Integer.valueOf(2015), tp.getStartYear());
+ Assert.assertEquals(Integer.valueOf(12), tp.getStartMonth());
+ Assert.assertEquals(Integer.valueOf(31), tp.getStartDay());
+ Assert.assertNull(tp.getEnd());
+
+ }
}
<parent>
<groupId>eu.etaxonomy</groupId>
<artifactId>cdmlib-parent</artifactId>
- <version>5.2.0</version>
+ <version>5.3.0</version>
<relativePath>../pom.xml</relativePath>
</parent>
*/
public StringBuffer createReport() {
StringBuffer report = new StringBuffer("");
+ addShortDescription(report);
addErrorReport(report, "Errors", errors);
addErrorReport(report, "Exceptions", exceptions);
addErrorReport(report, "Warnings", warnings);
}
+
+
+
+ /**
+ * Appends a short description of this report to the given buffer.
+ * The default implementation does nothing; subclasses may override
+ * to prepend a summary before the error/exception/warning sections.
+ * @param report the report buffer to append the description to
+ */
+ protected void addShortDescription(StringBuffer report) {
+ //do nothing
+
+ }
/**
* @param report
* @param label
*/
public List<T> list(T example, Set<String> includeProperties, Integer limit, Integer start, List<OrderHint> orderHints, List<String> propertyPaths);
- List<T> findByParamWithRestrictions(Class<? extends T> clazz, String param, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize,
+ public <S extends T> List<S> findByParamWithRestrictions(Class<S> clazz, String param, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize,
Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
- long countByParamWithRestrictions(Class<? extends T> clazz, String param, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions);
+ public long countByParamWithRestrictions(Class<? extends T> clazz, String param, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions);
- long countByParam(Class<? extends T> clazz, String param, String queryString, MatchMode matchmode, List<Criterion> criterion);
+ public long countByParam(Class<? extends T> clazz, String param, String queryString, MatchMode matchmode, List<Criterion> criterion);
- List<T> findByParam(Class<? extends T> clazz, String param, String queryString, MatchMode matchmode, List<Criterion> criterion, Integer pageSize, Integer pageNumber,
+ public <S extends T> List<S> findByParam(Class<S> clazz, String param, String queryString, MatchMode matchmode, List<Criterion> criterion, Integer pageSize, Integer pageNumber,
List<OrderHint> orderHints, List<String> propertyPaths);
}
* authorTeam.persistentTitleCache
* @return a List of instances of type T matching the queryString
*/
- public List<T> findByTitle(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
+ public <S extends T> List<S> findByTitle(Class<S> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
/**
* authorTeam.persistentTitleCache
* @return a List of instances of type T matching the queryString
*/
- public List<T> findByTitleWithRestrictions(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
+ public <S extends T> List<S> findByTitleWithRestrictions(Class<S> clazz, String queryString,MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
* @param matchMode
* @return
*/
- public List<T> findTitleCache(Class<? extends T> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, MatchMode matchMode);
+ public <S extends T> List<S> findTitleCache(Class<S> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, MatchMode matchMode);
/**
* Return a List of objects matching the given query string, optionally filtered by class, optionally with a particular MatchMode
*
* authorTeam.persistentTitleCache
* @return a List of instances of type T matching the queryString
*/
- public List<T> findByReferenceTitle(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
+ public <S extends T> List<S> findByReferenceTitle(Class<S> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
/**
* Return a List of objects matching the given query string, optionally filtered by class, optionally with a particular MatchMode
* authorTeam.persistentTitleCache
* @return a List of instances of type T matching the queryString
*/
- public List<T> findByReferenceTitleWithRestrictions(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
+ public <S extends T> List<S> findByReferenceTitleWithRestrictions(Class<S> clazz, String queryString,MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
/**
* Return a count of objects matching the given query string in the titleCache, optionally filtered by class, optionally with a particular MatchMode
* @see <a href="http://lucene.apache.org/java/2_4_0/queryparsersyntax.html">Apache Lucene - Query Parser Syntax</a>
*/
public List<T> search(Class<? extends T> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
+// public <S extends T> List<S> search(Class<S> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
/**
* Suggest a query that will return hits based upon an existing lucene query string (that is presumably misspelt and returns no hits)
* @return\r
*/\r
@Override\r
- public List<T> findByParam(Class<? extends T> clazz, String param, String queryString, MatchMode matchmode,\r
+ public <S extends T> List<S> findByParam(Class<S> clazz, String param, String queryString, MatchMode matchmode,\r
List<Criterion> criterion, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints,\r
List<String> propertyPaths) {\r
\r
addOrder(criteria, orderHints);\r
\r
@SuppressWarnings("unchecked")\r
- List<T> result = criteria.list();\r
+ List<S> result = criteria.list();\r
defaultBeanInitializer.initializeAll(result, propertyPaths);\r
return result;\r
}\r
\r
\r
@Override\r
- public List<T> findByParamWithRestrictions(Class<? extends T> clazz, String param, String queryString,\r
+ public <S extends T> List<S> findByParamWithRestrictions(Class<S> clazz, String param, String queryString,\r
MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber,\r
List<OrderHint> orderHints, List<String> propertyPaths) {\r
\r
addOrder(criteria, orderHints);\r
\r
@SuppressWarnings("unchecked")\r
- List<T> result = criteria.list();\r
+ List<S> result = criteria.list();\r
defaultBeanInitializer.initializeAll(result, propertyPaths);\r
return result;\r
\r
return results;
}
+ /**
+ * FIXME candidate for removal
+ * @deprecated use {@link #findTitleCache(Class, String, Integer, Integer, List, MatchMode)} instead (or other methods)
+ */
@Override
+ @Deprecated
public List<T> findByTitleAndClass(String queryString, Class<T> clazz) {
checkNotInPriorView("IdentifiableDaoBase.findByTitleAndClass(String queryString, Class<T> clazz)");
Criteria crit = getSession().createCriteria(clazz);
crit.add(Restrictions.ilike("titleCache", queryString));
+ @SuppressWarnings("unchecked")
List<T> results = crit.list();
return results;
}
@Override
- public List<T> findTitleCache(Class<? extends T> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, MatchMode matchMode){
+ public <S extends T> List<S> findTitleCache(Class<S> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, MatchMode matchMode){
Query query = prepareFindTitleCache(clazz, queryString, pageSize,
pageNumber, matchMode, false);
- List<T> result = query.list();
+ @SuppressWarnings("unchecked")
+ List<S> result = query.list();
return result;
}
}
@Override
- public List<T> findByTitle(Class<? extends T> clazz, String queryString, MatchMode matchmode, List<Criterion> criterion, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+ public <S extends T> List<S> findByTitle(Class<S> clazz, String queryString, MatchMode matchmode, List<Criterion> criterion, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
return findByParam(clazz, "titleCache", queryString, matchmode, criterion, pageSize, pageNumber, orderHints, propertyPaths);
}
@Override
- public List<T> findByReferenceTitle(Class<? extends T> clazz, String queryString, MatchMode matchmode, List<Criterion> criterion, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+ public <S extends T> List<S> findByReferenceTitle(Class<S> clazz, String queryString, MatchMode matchmode, List<Criterion> criterion, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
return findByParam(clazz, "title", queryString, matchmode, criterion, pageSize, pageNumber, orderHints, propertyPaths);
}
@Override
- public List<T> findByTitleWithRestrictions(Class<? extends T> clazz, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+ public <S extends T> List<S> findByTitleWithRestrictions(Class<S> clazz, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
return findByParamWithRestrictions(clazz, "titleCache", queryString, matchmode, restrictions, pageSize, pageNumber, orderHints, propertyPaths);
}
@Override
- public List<T> findByReferenceTitleWithRestrictions(Class<? extends T> clazz, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+ public <S extends T> List<S> findByReferenceTitleWithRestrictions(Class<S> clazz, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
return findByParamWithRestrictions(clazz, "title", queryString, matchmode, restrictions, pageSize, pageNumber, orderHints, propertyPaths);
}
}
@Override
- public List<T> findByParam(Class<? extends T> clazz, String param, String queryString, MatchMode matchmode, List<Criterion> criterion, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+ public <S extends T> List<S> findByParam(Class<S> clazz, String param, String queryString, MatchMode matchmode, List<Criterion> criterion, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
checkNotInPriorView("IdentifiableDaoBase.findByParam(Class<? extends T> clazz, String queryString, MatchMode matchmode, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths)");
return super.findByParam(clazz, param, queryString, matchmode, criterion, pageSize, pageNumber, orderHints, propertyPaths);
}
@Override
- public List<T> findByParamWithRestrictions(Class<? extends T> clazz, String param, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+ public <S extends T> List<S> findByParamWithRestrictions(Class<S> clazz, String param, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
checkNotInPriorView("IdentifiableDaoBase.findByParam(Class<? extends T> clazz, String queryString, MatchMode matchmode, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths)");
return super.findByParamWithRestrictions(clazz, param, queryString, matchmode, restrictions, pageSize, pageNumber, orderHints, propertyPaths);
}
@Override
public List<DescriptionElementBase> search(Class<? extends DescriptionElementBase> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+// public <S extends DescriptionElementBase> List<S> search(Class<S> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
checkNotInPriorView("DescriptionElementDaoImpl.searchTextData(String queryString, Integer pageSize, Integer pageNumber)");
QueryParser queryParser = new QueryParser(defaultField, new StandardAnalyzer());
\r
@Override\r
@SuppressWarnings("unchecked")\r
- public List<TaxonNode> listRankSpecificRootNodes(Classification classification, Rank rank,\r
+ public List<TaxonNode> listRankSpecificRootNodes(Classification classification, TaxonNode taxonNode, Rank rank,\r
boolean includeUnpublished, Integer limit, Integer start, List<String> propertyPaths, int queryIndex){\r
\r
List<TaxonNode> results = new ArrayList<>();\r
- Query[] queries = prepareRankSpecificRootNodes(classification, rank, includeUnpublished, false);\r
+ Query[] queries = prepareRankSpecificRootNodes(classification, taxonNode, rank, includeUnpublished, false);\r
\r
// since this method is using two queries sequentially the handling of limit and start\r
// is a bit more complex\r
}\r
\r
@Override\r
- public long[] countRankSpecificRootNodes(Classification classification, boolean includeUnpublished, Rank rank) {\r
+ public long[] countRankSpecificRootNodes(Classification classification, TaxonNode subtree, boolean includeUnpublished, Rank rank) {\r
\r
long[] result = new long[(rank == null ? 1 : 2)];\r
- Query[] queries = prepareRankSpecificRootNodes(classification, rank, includeUnpublished, true);\r
+ Query[] queries = prepareRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, true);\r
int i = 0;\r
for(Query q : queries) {\r
result[i++] = (Long)q.uniqueResult();\r
* one or two Queries as array, depending on the <code>rank</code> parameter:\r
* <code>rank == null</code>: array with one item, <code>rank != null</code>: array with two items.\r
*/\r
- private Query[] prepareRankSpecificRootNodes(Classification classification, Rank rank,\r
+ private Query[] prepareRankSpecificRootNodes(Classification classification,\r
+ TaxonNode subtree, Rank rank,\r
boolean includeUnpublished, boolean doCount) {\r
Query query1;\r
Query query2 = null;\r
\r
String whereClassification = classification != null? " AND tn.classification = :classification " : "";\r
String whereUnpublished = includeUnpublished? "" : " AND tn.taxon.publish = :publish ";\r
+ String whereSubtree = subtree != null ? " AND tn.treeIndex like :treeIndexLike " : "";\r
+ TreeIndex treeIndex = TreeIndex.NewInstance(subtree);\r
+ String whereHighest =\r
+ treeIndex == null ? " tn.parent.parent = null ":\r
+ treeIndex.isTreeRoot() ? " tn.parent.treeIndex = :treeIndex ":\r
+ " tn.treeIndex = :treeIndex " ;\r
\r
String selectWhat = doCount ? "COUNT(distinct tn)" : "DISTINCT tn";\r
\r
String joinFetch = doCount ? "" : " JOIN FETCH tn.taxon t JOIN FETCH t.name n LEFT JOIN FETCH n.rank LEFT JOIN FETCH t.sec ";\r
\r
if(rank == null){\r
- String hql = "SELECT " + selectWhat + " FROM TaxonNode tn" +\r
- joinFetch +\r
- " WHERE tn.parent.parent = null " +\r
- whereClassification + whereUnpublished;\r
+ String hql = "SELECT " + selectWhat +\r
+ " FROM TaxonNode tn" +\r
+ joinFetch +\r
+ " WHERE " + whereHighest +\r
+ whereClassification + whereUnpublished;\r
query1 = getSession().createQuery(hql);\r
} else {\r
// this is for the cases\r
// - exact match of the ranks\r
- // - rank of root node is lower but is has no parents\r
- String hql1 = "SELECT " + selectWhat + " FROM TaxonNode tn " +\r
- joinFetch +\r
+ // - rank of root node is lower but it has no parents\r
+ String hql1 = "SELECT " + selectWhat +\r
+ " FROM TaxonNode tn " +\r
+ joinFetch +\r
" WHERE " +\r
" (tn.taxon.name.rank = :rank" +\r
- " OR (tn.taxon.name.rank.orderIndex > :rankOrderIndex AND tn.parent.parent = null)" +\r
+ " OR ((tn.taxon.name.rank.orderIndex > :rankOrderIndex) AND (" + whereHighest + "))" +\r
" )"\r
- + whereClassification + whereUnpublished ;\r
+ + whereClassification + whereSubtree + whereUnpublished ;\r
\r
// this is for the case\r
// - rank of root node is lower and it has a parent with higher rank\r
- String hql2 = "SELECT " + selectWhat + " FROM TaxonNode tn JOIN tn.parent as parent" +\r
- joinFetch +\r
+ String whereParentSubtree = subtree != null ? " AND parent.treeIndex like :treeIndexLike " : "";\r
+ String hql2 = "SELECT " + selectWhat +\r
+ " FROM TaxonNode tn JOIN tn.parent as parent" +\r
+ joinFetch +\r
" WHERE " +\r
- " (tn.taxon.name.rank.orderIndex > :rankOrderIndex AND parent.taxon.name.rank.orderIndex < :rankOrderIndex )"\r
- + whereClassification + whereUnpublished;\r
+ " (tn.taxon.name.rank.orderIndex > :rankOrderIndex "\r
+ + " AND parent.taxon.name.rank.orderIndex < :rankOrderIndex )"\r
+ + whereClassification + whereSubtree\r
+ + whereParentSubtree + whereUnpublished;\r
+\r
query1 = getSession().createQuery(hql1);\r
query2 = getSession().createQuery(hql2);\r
query1.setParameter("rank", rank);\r
query2.setParameter("classification", classification);\r
}\r
}\r
+ if (subtree != null){\r
+ query1.setParameter("treeIndex", subtree.treeIndex());\r
+ if (rank != null){\r
+ query1.setParameter("treeIndexLike", subtree.treeIndex()+"%");\r
+ }\r
+ if(query2 != null) {\r
+ query2.setParameter("treeIndexLike", subtree.treeIndex()+"%");\r
+ }\r
+ }\r
if (!includeUnpublished){\r
query1.setBoolean("publish", true);\r
if(query2 != null) {\r
}\r
\r
@Override\r
- public List<TaxonNode> listChildrenOf(Taxon taxon, Classification classification, boolean includeUnpublished,\r
+ public List<TaxonNode> listChildrenOf(Taxon taxon, Classification classification, TaxonNode subtree, boolean includeUnpublished,\r
Integer pageSize, Integer pageIndex, List<String> propertyPaths){\r
- Query query = prepareListChildrenOf(taxon, classification, false, includeUnpublished);\r
+ Query query = prepareListChildrenOf(taxon, classification, subtree, false, includeUnpublished);\r
\r
setPagingParameter(query, pageSize, pageIndex);\r
\r
\r
\r
@Override\r
- public Long countChildrenOf(Taxon taxon, Classification classification,\r
+ public Long countChildrenOf(Taxon taxon, Classification classification, TaxonNode subtree,\r
boolean includeUnpublished){\r
- Query query = prepareListChildrenOf(taxon, classification, true, includeUnpublished);\r
+ Query query = prepareListChildrenOf(taxon, classification, subtree, true, includeUnpublished);\r
Long count = (Long) query.uniqueResult();\r
return count;\r
}\r
return count;\r
}\r
\r
- private Query prepareListChildrenOf(Taxon taxon, Classification classification,\r
+ private Query prepareListChildrenOf(Taxon taxon, Classification classification, TaxonNode subtree,\r
boolean doCount, boolean includeUnpublished){\r
\r
String selectWhat = doCount ? "COUNT(cn)" : "cn";\r
if (!includeUnpublished){\r
hql += " AND cn.taxon.publish = :publish ";\r
}\r
+ if (subtree != null){\r
+ hql += " AND tn.treeIndex like :treeIndexLike ";\r
+ }\r
Query query = getSession().createQuery(hql);\r
query.setParameter("taxon", taxon);\r
query.setParameter("classification", classification);\r
if (!includeUnpublished){\r
query.setBoolean("publish", Boolean.TRUE);\r
}\r
+ if (subtree != null){\r
+ query.setParameter("treeIndexLike", subtree.treeIndexLike());\r
+ }\r
return query;\r
}\r
\r
return result;
}
+ //TODO needed? Currently only used by tests.
public List<TaxonBase> getTaxaByName(boolean doTaxa, boolean doSynonyms, boolean includeUnpublished,
String queryString, MatchMode matchMode, Integer pageSize, Integer pageNumber) {
return getTaxaByName(doTaxa, doSynonyms, false, false, false,
- queryString, null, matchMode, null, includeUnpublished, null, pageSize, pageNumber, null);
+ queryString, null, null, matchMode, null, includeUnpublished, null, pageSize, pageNumber, null);
}
@Override
@Override
public List<TaxonBase> getTaxaByName(boolean doTaxa, boolean doSynonyms, boolean doMisappliedNames, boolean doCommonNames,
boolean includeAuthors,
- String queryString, Classification classification,
+ String queryString, Classification classification, TaxonNode subtree,
MatchMode matchMode, Set<NamedArea> namedAreas, boolean includeUnpublished, NameSearchOrder order,
Integer pageSize, Integer pageNumber, List<String> propertyPaths) {
boolean doCount = false;
String searchField = includeAuthors ? "titleCache" : "nameCache";
- Query query = prepareTaxaByName(doTaxa, doSynonyms, doMisappliedNames, doCommonNames, includeUnpublished, searchField, queryString, classification, matchMode, namedAreas, order, pageSize, pageNumber, doCount);
+ Query query = prepareTaxaByName(doTaxa, doSynonyms, doMisappliedNames, doCommonNames, includeUnpublished, searchField, queryString, classification, subtree, matchMode, namedAreas, order, pageSize, pageNumber, doCount);
if (query != null){
@SuppressWarnings({ "unchecked", "rawtypes" })
}else{
return new ArrayList<>();
}
-
-
}
-
//new search for the editor, for performance issues the return values are only uuid and titleCache, to avoid the initialisation of all objects
@Override
@SuppressWarnings("unchecked")
public List<UuidAndTitleCache<? extends IdentifiableEntity>> getTaxaByNameForEditor(boolean doTaxa, boolean doSynonyms, boolean doNamesWithoutTaxa,
- boolean doMisappliedNames, boolean doCommonNames, boolean includeUnpublished, String queryString, Classification classification,
+ boolean doMisappliedNames, boolean doCommonNames, boolean includeUnpublished, String queryString, Classification classification, TaxonNode subtree,
MatchMode matchMode, Set<NamedArea> namedAreas, NameSearchOrder order) {
if (order == null){
}
}
Query query = prepareTaxaByNameForEditor(doTaxa, doSynonyms, doMisappliedNames, doCommonNames, includeUnpublished,
- "nameCache", queryString, classification, matchMode, namedAreas, doCount, order);
+ "nameCache", queryString, classification, subtree, matchMode, namedAreas, doCount, order);
if (query != null){
List<Object[]> results = query.list();
*
*/
private Query prepareTaxaByNameForEditor(boolean doTaxa, boolean doSynonyms, boolean doMisappliedNames, boolean doCommonNames,
- boolean includeUnpublished, String searchField, String queryString, Classification classification,
+ boolean includeUnpublished, String searchField, String queryString, Classification classification, TaxonNode subtree,
MatchMode matchMode, Set<NamedArea> namedAreas, boolean doCount, NameSearchOrder order) {
return prepareByNameQuery(doTaxa, doSynonyms, doMisappliedNames, doCommonNames, includeUnpublished,
searchField, queryString,
- classification, matchMode, namedAreas, order, doCount, true);
+ classification, subtree, matchMode, namedAreas, order, doCount, true);
}
*/
private Query prepareByNameQuery(boolean doTaxa, boolean doSynonyms, boolean doMisappliedNames,
boolean doCommonNames, boolean includeUnpublished, String searchField, String queryString,
- Classification classification, MatchMode matchMode, Set<NamedArea> namedAreas,
+ Classification classification, TaxonNode subtree, MatchMode matchMode, Set<NamedArea> namedAreas,
NameSearchOrder order, boolean doCount, boolean returnIdAndTitle){
boolean doProParteSynonyms = doSynonyms; //we may distinguish in future
}
Subselects subSelects = createByNameHQLString(doConceptRelations,
- includeUnpublished, classification, areasExpanded, matchMode, searchField);
+ includeUnpublished, classification, subtree, areasExpanded, matchMode, searchField);
String taxonSubselect = subSelects.taxonSubselect;
String synonymSubselect = subSelects.synonymSubselect;
String conceptSelect = subSelects.conceptSelect;
// find Taxa
Query subTaxon = getSearchQueryString(hqlQueryString, taxonSubselect);
- addRestrictions(doAreaRestriction, classification, includeUnpublished,
+ addRestrictions(doAreaRestriction, classification, subtree, includeUnpublished,
namedAreasUuids, subTaxon);
taxonIDs = subTaxon.list();
}
if(doSynonyms){
// find synonyms
Query subSynonym = getSearchQueryString(hqlQueryString, synonymSubselect);
- addRestrictions(doAreaRestriction, classification, includeUnpublished, namedAreasUuids,subSynonym);
+ addRestrictions(doAreaRestriction, classification, subtree, includeUnpublished, namedAreasUuids,subSynonym);
synonymIDs = subSynonym.list();
}
if (doConceptRelations ){
relTypeSet.addAll(TaxonRelationshipType.allSynonymTypes());
}
subMisappliedNames.setParameterList("rTypeSet", relTypeSet);
- addRestrictions(doAreaRestriction, classification, includeUnpublished, namedAreasUuids, subMisappliedNames);
+ addRestrictions(doAreaRestriction, classification, subtree, includeUnpublished, namedAreasUuids, subMisappliedNames);
taxonIDs.addAll(subMisappliedNames.list());
}
if(doCommonNames){
// find Taxa
Query subCommonNames = getSearchQueryString(hqlQueryString, commonNameSubSelect);
- addRestrictions(doAreaRestriction, classification, includeUnpublished, namedAreasUuids, subCommonNames);
+ addRestrictions(doAreaRestriction, classification, subtree, includeUnpublished, namedAreasUuids, subCommonNames);
taxonIDs.addAll(subCommonNames.list());
}
* @param includeUnpublished
* @param classification
* @param doAreaRestriction
+ * @param subtree
* @param namedAreasUuids
* @param subTaxon
*/
- protected void addRestrictions(boolean doAreaRestriction, Classification classification, boolean includeUnpublished,
+ protected void addRestrictions(boolean doAreaRestriction, Classification classification, TaxonNode subtree, boolean includeUnpublished,
Set<UUID> namedAreasUuids, Query query) {
if(doAreaRestriction){
query.setParameterList("namedAreasUuids", namedAreasUuids);
if(classification != null){
query.setParameter("classification", classification);
}
+ if(subtree != null){
+ query.setParameter("treeIndexLike", subtree.treeIndex() + "%");
+ }
if(!includeUnpublished){
query.setBoolean("publish", true);
}
*/
private Query prepareTaxaByName(boolean doTaxa, boolean doSynonyms, boolean doMisappliedNames,
boolean doCommonNames, boolean includeUnpublished, String searchField, String queryString,
- Classification classification, MatchMode matchMode, Set<NamedArea> namedAreas, NameSearchOrder order, Integer pageSize, Integer pageNumber, boolean doCount) {
+ Classification classification, TaxonNode subtree, MatchMode matchMode, Set<NamedArea> namedAreas, NameSearchOrder order, Integer pageSize, Integer pageNumber, boolean doCount) {
Query query = prepareByNameQuery(doTaxa, doSynonyms, doMisappliedNames, doCommonNames, includeUnpublished,
- searchField, queryString, classification, matchMode, namedAreas, order, doCount, false);
+ searchField, queryString, classification, subtree, matchMode, namedAreas, order, doCount, false);
if(pageSize != null && !doCount && query != null) {
query.setMaxResults(pageSize);
@Override
public long countTaxaByName(boolean doTaxa, boolean doSynonyms, boolean doMisappliedNames, boolean doCommonNames,
- boolean doIncludeAuthors, String queryString, Classification classification,
+ boolean doIncludeAuthors, String queryString, Classification classification, TaxonNode subtree,
MatchMode matchMode, Set<NamedArea> namedAreas, boolean includeUnpublished) {
boolean doCount = true;
String searchField = doIncludeAuthors ? "titleCache": "nameCache";
Query query = prepareTaxaByName(doTaxa, doSynonyms, doMisappliedNames, doCommonNames, includeUnpublished,
- searchField, queryString, classification, matchMode, namedAreas, null, null, null, doCount);
+ searchField, queryString, classification, subtree, matchMode, namedAreas, null, null, null, doCount);
if (query != null) {
return (Long)query.uniqueResult();
}else{
* @param areaQuery
*/
private void expandNamedAreas(Collection<NamedArea> namedAreas, Set<NamedArea> areasExpanded, Query areaQuery) {
- List<NamedArea> childAreas;
for(NamedArea a : namedAreas){
areasExpanded.add(a);
areaQuery.setParameter("area", a);
- childAreas = areaQuery.list();
+ @SuppressWarnings("unchecked")
+ List<NamedArea> childAreas = areaQuery.list();
if(childAreas.size() > 0){
areasExpanded.addAll(childAreas);
expandNamedAreas(childAreas, areasExpanded, areaQuery);
}
@Override
- public List<TaxonBase> findByNameTitleCache(boolean doTaxa, boolean doSynonyms, boolean includeUnpublished, String queryString, Classification classification, MatchMode matchMode, Set<NamedArea> namedAreas, NameSearchOrder order, Integer pageNumber, Integer pageSize, List<String> propertyPaths) {
+ public List<TaxonBase> findByNameTitleCache(boolean doTaxa, boolean doSynonyms, boolean includeUnpublished, String queryString, Classification classification, TaxonNode subtree, MatchMode matchMode, Set<NamedArea> namedAreas, NameSearchOrder order, Integer pageNumber, Integer pageSize, List<String> propertyPaths) {
boolean doCount = false;
- Query query = prepareTaxaByName(doTaxa, doSynonyms, false, false, includeUnpublished, "titleCache", queryString, classification, matchMode, namedAreas, order, pageSize, pageNumber, doCount);
+ Query query = prepareTaxaByName(doTaxa, doSynonyms, false, false, includeUnpublished, "titleCache", queryString, classification, subtree, matchMode, namedAreas, order, pageSize, pageNumber, doCount);
if (query != null){
@SuppressWarnings({ "unchecked", "rawtypes" })
List<TaxonBase> results = query.list();
private String prepareTaxonRelationshipQuery(Set<TaxonRelationshipType> types, boolean includeUnpublished,
Direction direction, boolean isCount) {
String selectStr = isCount? " count(rel) as n ":" rel ";
- String result = "SELECT " + selectStr +
- " FROM TaxonRelationship rel " +
- " WHERE rel."+direction+" = :relatedTaxon";
+ String result = "SELECT " + selectStr + " FROM TaxonRelationship rel ";
+ if(direction != null){
+ result += " WHERE rel."+direction+" = :relatedTaxon";
+ } else {
+ result += " WHERE (rel.relatedFrom = :relatedTaxon OR rel.relatedTo = :relatedTaxon )";
+ }
if (types != null){
result += " AND rel.type IN (:types) ";
}
}
private Subselects createByNameHQLString(boolean doConceptRelations,
- boolean includeUnpublished, Classification classification, Set<NamedArea> areasExpanded,
- MatchMode matchMode, String searchField){
+ boolean includeUnpublished, Classification classification, TaxonNode subtree,
+ Set<NamedArea> areasExpanded, MatchMode matchMode, String searchField){
+
boolean doAreaRestriction = areasExpanded.size() > 0;
+ boolean hasTaxonNodeFilter = classification != null || subtree != null;
+
String doAreaRestrictionSubSelect =
" SELECT %s.id "
+ " FROM Distribution e "
+ " JOIN e.inDescription d "
+ " JOIN d.taxon t " +
- (classification != null ? " JOIN t.taxonNodes AS tn " : " ");
+ (hasTaxonNodeFilter ? " JOIN t.taxonNodes AS tn " : " ");
String doAreaRestrictionConceptRelationSubSelect =
"SELECT %s.id "
String doTaxonSubSelect =
" SELECT %s.id "
- + " FROM Taxon t " + (classification != null ? " "
+ + " FROM Taxon t " + (hasTaxonNodeFilter ? " "
+ " JOIN t.taxonNodes AS tn " : " ");
String doTaxonMisappliedNameSubSelect =
String doConceptRelationJoin =
" LEFT JOIN t.relationsFromThisTaxon AS rft " +
" LEFT JOIN rft.relatedTo AS rt " +
- (classification != null ? " LEFT JOIN rt.taxonNodes AS tn2 " : " ") +
+ (hasTaxonNodeFilter ? " LEFT JOIN rt.taxonNodes AS tn2 " : " ") +
" LEFT JOIN rt.name AS n2" +
" LEFT JOIN rft.type as rtype";
" LEFT JOIN com.feature f ";
- String doClassificationWhere = " tn.classification = :classification";
- String doClassificationForConceptRelationsWhere = " tn2.classification = :classification";
+ String doTreeWhere = classification == null ? "" : " AND tn.classification = :classification";
+ String doTreeForConceptRelationsWhere = classification == null ? "": " AND tn2.classification = :classification";
+
+ String doSubtreeWhere = subtree == null? "":" AND tn.treeIndex like :treeIndexLike";
+ String doSubtreeForConceptRelationsWhere = subtree == null? "":" AND tn2.treeIndex like :treeIndexLike";
String doAreaRestrictionWhere = " e.area.uuid in (:namedAreasUuids)";
String doCommonNamesRestrictionWhere = " (f.supportsCommonTaxonName = true and com.name "+matchMode.getMatchOperator()+" :queryString )";
String conceptSelect = null;
String commonNameSubselect = null;
- if(classification != null ){
+ if(hasTaxonNodeFilter){
if (!doConceptRelations){
if(doAreaRestriction){
taxonSubselect = String.format(doAreaRestrictionSubSelect, "t") + doTaxonNameJoin +
- " WHERE " + doAreaRestrictionWhere +
- " AND " + doClassificationWhere +
+ " WHERE (1=1) AND " + doAreaRestrictionWhere +
+ doTreeWhere + doSubtreeWhere +
" AND " + String.format(doSearchFieldWhere, "n");
synonymSubselect = String.format(doAreaRestrictionSubSelect, "s") + doSynonymNameJoin +
- " WHERE " + doAreaRestrictionWhere +
- " AND " + doClassificationWhere +
+ " WHERE (1=1) AND " + doAreaRestrictionWhere +
+ doTreeWhere + doSubtreeWhere +
" AND " + String.format(doSearchFieldWhere, "sn");
commonNameSubselect = String.format(doAreaRestrictionSubSelect, "t") + doCommonNamesJoin +
- " WHERE " + doAreaRestrictionWhere +
- " AND " + doClassificationWhere +
+ " WHERE (1=1) AND " + doAreaRestrictionWhere +
+ doTreeWhere + doSubtreeWhere +
" AND " + String.format(doSearchFieldWhere, "n") +
" AND " + doCommonNamesRestrictionWhere;
} else {//no area restriction
taxonSubselect = String.format(doTaxonSubSelect, "t" )+ doTaxonNameJoin +
- " WHERE " + doClassificationWhere +
+ " WHERE (1=1) " + doTreeWhere + doSubtreeWhere +
" AND " + String.format(doSearchFieldWhere, "n");
synonymSubselect = String.format(doTaxonSubSelect, "s" ) + doSynonymNameJoin +
- " WHERE " + doClassificationWhere +
+ " WHERE (1=1) " + doTreeWhere + doSubtreeWhere +
" AND " + String.format(doSearchFieldWhere, "sn");
commonNameSubselect =String.format(doTaxonSubSelect, "t" )+ doCommonNamesJoin +
- " WHERE " + doClassificationWhere +
+ " WHERE (1=1) " + doTreeWhere + doSubtreeWhere +
" AND " + doCommonNamesRestrictionWhere;
}
}else{ //concept relations included
conceptSelect = String.format(doAreaRestrictionConceptRelationSubSelect, "t") + doTaxonNameJoin + doConceptRelationJoin +
" WHERE " + doAreaRestrictionWhere +
" AND " + String.format(doSearchFieldWhere, "n") +
- " AND " + doClassificationForConceptRelationsWhere +
+ doTreeForConceptRelationsWhere + doSubtreeForConceptRelationsWhere +
" AND " + doRelationshipTypeComparison;
taxonSubselect = String.format(doAreaRestrictionSubSelect, "t") + doTaxonNameJoin +
" WHERE " + doAreaRestrictionWhere +
" AND " + String.format(doSearchFieldWhere, "n") +
- " AND " + doClassificationWhere;
+ doTreeWhere + doSubtreeWhere;
synonymSubselect = String.format(doAreaRestrictionSubSelect, "s") + doSynonymNameJoin +
" WHERE " + doAreaRestrictionWhere +
- " AND " + doClassificationWhere +
+ doTreeWhere + doSubtreeWhere +
" AND " + String.format(doSearchFieldWhere, "sn");
commonNameSubselect= String.format(doAreaRestrictionSubSelect, "t")+ doCommonNamesJoin +
" WHERE " + doAreaRestrictionWhere +
- " AND " + doClassificationWhere +
+ doTreeWhere + doSubtreeWhere +
" AND " + doCommonNamesRestrictionWhere;
} else {//no area restriction
conceptSelect = String.format(doTaxonMisappliedNameSubSelect, "t" ) + doTaxonNameJoin + doConceptRelationJoin +
" WHERE " + String.format(doSearchFieldWhere, "n") +
- " AND " + doClassificationForConceptRelationsWhere +
+ doTreeForConceptRelationsWhere + doSubtreeForConceptRelationsWhere +
" AND " + doRelationshipTypeComparison;
taxonSubselect = String.format(doTaxonSubSelect, "t" ) + doTaxonNameJoin +
" WHERE " + String.format(doSearchFieldWhere, "n") +
- " AND "+ doClassificationWhere;
+ doTreeWhere + doSubtreeWhere;
synonymSubselect = String.format(doTaxonSubSelect, "s" ) + doSynonymNameJoin +
- " WHERE " + doClassificationWhere +
+ " WHERE (1=1) " + doTreeWhere + doSubtreeWhere +
" AND " + String.format(doSearchFieldWhere, "sn");
commonNameSubselect= String.format(doTaxonSubSelect, "t")+ doCommonNamesJoin +
- " WHERE " + doClassificationWhere +
+ " WHERE (1=1) " + doTreeWhere + doSubtreeWhere +
" AND " + doCommonNamesRestrictionWhere;
}
}
- } else { //classification = null
+ } else { //classification = null && subtree = null
if(doAreaRestriction){
conceptSelect = String.format(doAreaRestrictionConceptRelationSubSelect, "t") + doTaxonNameJoin + doConceptRelationJoin +
" WHERE " + doAreaRestrictionWhere +
defaultBeanInitializer.initializeAll(results, propertyPaths);\r
return results;\r
}else{\r
- return classificationDao.listChildrenOf(node.getTaxon(), node.getClassification(),\r
+ return classificationDao.listChildrenOf(node.getTaxon(), node.getClassification(), null,\r
includeUnpublished, pageSize, pageIndex, propertyPaths);\r
}\r
\r
return ((Integer)crit.uniqueResult().hashCode()).longValue();\r
}else{\r
return classificationDao.countChildrenOf(\r
- node.getTaxon(), classification, includeUnpublished);\r
+ node.getTaxon(), classification, null, includeUnpublished);\r
}\r
}\r
/**\r
* <code>rank != null</code>.\r
* @return\r
*/\r
- public List<TaxonNode> listRankSpecificRootNodes(Classification classification, Rank rank,\r
+ public List<TaxonNode> listRankSpecificRootNodes(Classification classification, TaxonNode subtree, Rank rank,\r
boolean includeUnpublished, Integer limit, Integer start, List<String> propertyPaths, int queryIndex);\r
\r
- public long[] countRankSpecificRootNodes(Classification classification, boolean includeUnpublished, Rank rank);\r
+ public long[] countRankSpecificRootNodes(Classification classification, TaxonNode subtree, boolean includeUnpublished, Rank rank);\r
\r
- public List<TaxonNode> listChildrenOf(Taxon taxon, Classification classification, boolean includeUnpublished,\r
+ public List<TaxonNode> listChildrenOf(Taxon taxon, Classification classification, TaxonNode subtree, boolean includeUnpublished,\r
Integer pageSize, Integer pageIndex, List<String> propertyPaths);\r
\r
- public Long countChildrenOf(Taxon taxon, Classification classification, boolean includeUnpublished);\r
+ public Long countChildrenOf(Taxon taxon, Classification classification, TaxonNode subtree, boolean includeUnpublished);\r
\r
public TaxonNode getRootNode(UUID classificationUuid);\r
\r
* @param doSynonyms
* @param queryString
* @param classification TODO
+ * @param subtree
* @param matchMode
* @param namedAreas TODO
* @param pageSize
* @return list of found taxa
*/
public List<TaxonBase> getTaxaByName(boolean doTaxa, boolean doSynonyms, boolean doMisappliedNames, boolean doCommonNames,
- boolean includeAuthors, String queryString, Classification classification,
+ boolean includeAuthors, String queryString, Classification classification, TaxonNode subtree,
MatchMode matchMode, Set<NamedArea> namedAreas, boolean includeUnpublished,
NameSearchOrder order, Integer pageSize, Integer pageNumber, List<String> propertyPaths);
* @param doSynonyms
* @param queryString
* @param classification TODO
+ * @param subtree
* @param matchMode
* @param namedAreas
* @param pageSize
* @return
*/
public long countTaxaByName(boolean doTaxa, boolean doSynonyms, boolean doMisappliedNames, boolean doCommonNames,
- boolean doIncludeAuthors, String queryString, Classification classification,
+ boolean doIncludeAuthors, String queryString, Classification classification, TaxonNode subtree,
MatchMode matchMode, Set<NamedArea> namedAreas, boolean includeUnpublished);
-// /**
-// * @param queryString
-// * @param matchMode
-// * @param accepted
-// * @return
-// */
-// public Integer countTaxaByName(String queryString, MatchMode matchMode,
-// Boolean accepted);
-
-// /**
-// * Returns a count of TaxonBase instances where the
-// * taxon.name properties match the parameters passed.
-// *
-// * @param queryString search string
-// * @param matchMode way how search string shall be matched: exact, beginning, or anywhere
-// * @param selectModel all taxon base, taxa, or synonyms
-// */
-// public Integer countTaxaByName(String queryString, MatchMode matchMode, SelectMode selectMode);
/**
* Returns a count of TaxonBase instances where the
* @return
*/
public List<TaxonBase> findByNameTitleCache(boolean doTaxa, boolean doSynonyms, boolean includeUnpublished,
- String queryString, Classification classification, MatchMode matchMode, Set<NamedArea> namedAreas,
+ String queryString, Classification classification, TaxonNode subtree, MatchMode matchMode, Set<NamedArea> namedAreas,
NameSearchOrder order, Integer pageNumber, Integer pageSize, List<String> propertyPaths) ;
/**
public List<UuidAndTitleCache<? extends IdentifiableEntity>> getTaxaByNameForEditor(boolean doTaxa, boolean doSynonyms, boolean doNamesWithoutTaxa,
boolean doMisappliedNames, boolean doCommonNames, boolean includeUnpublished,
- String queryString, Classification classification,
+ String queryString, Classification classification, TaxonNode subtree,
MatchMode matchMode, Set<NamedArea> namedAreas, NameSearchOrder order);
public List<String> taxaByNameNotInDB(List<String> taxonNames);
/**
* @author cmathew
* @since 7 Oct 2015
- *
*/
public class MergeResult<T extends ICdmBase> implements Serializable {
return previousState != null;
}
+ /**
+ * Compares the current state of the entity property (state being persisted) with the previous state
+ * (state to be overwritten in the storage) and returns <code>true</code> in case there is a previous
+ * state and the new state is different.
+ *
+ * @param propertyName
+ * @return
+ */
public boolean propertyChanged(String propertyName){
if(propertyNames == null){
// usually during a save or delete operation
return false;
}
+ if(!hasPreviousState()){
+ // should be covered by propertyNames == null but this check seems to be necessary in rare situations
+ // see the NPE stack trace in #7702 for an example
+ return false;
+ }
int i = 0;
for(String p : propertyNames){
if(p.equals(propertyName)){
RegistrationStatus status;
if(targetEntityStates.propertyChanged("status")){
- status = targetEntityStates.previousPropertyState("status", RegistrationStatus.class);
+ status = targetEntityStates.previousPropertyState("status", RegistrationStatus.class);
} else {
status = ((Registration)targetEntityStates.getEntity()).getStatus();
-
+
}
vr.isPropertyMatch = cdmAuthority.getProperty().contains(status.name());
logger.debug("property is matching");
import eu.etaxonomy.cdm.persistence.dao.reference.IReferenceDao;
import eu.etaxonomy.cdm.persistence.dao.taxon.IClassificationDao;
import eu.etaxonomy.cdm.persistence.dao.taxon.ITaxonDao;
+import eu.etaxonomy.cdm.persistence.dao.taxon.ITaxonNodeDao;
import eu.etaxonomy.cdm.persistence.dto.ClassificationLookupDTO;
import eu.etaxonomy.cdm.test.integration.CdmTransactionalIntegrationTest;
import eu.etaxonomy.cdm.test.unitils.CleanSweepInsertLoadStrategy;
private IClassificationDao classificationDao;
@SpringBeanByType
private IReferenceDao referenceDao;
+ @SpringBeanByType
+ private ITaxonNodeDao taxonNodeDao;
private boolean includeUnpublished;
- private static final String CLASSIFICATION_UUID = "2a5ceebb-4830-4524-b330-78461bf8cb6b";
- private static final String CLASSIFICATION_FULL_UUID = "a71467a6-74dc-4148-9530-484628a5ab0e";
+ private static final UUID FLAT_CLASSIFICATION_UUID = UUID.fromString("2a5ceebb-4830-4524-b330-78461bf8cb6b");
+ private static final UUID CLASSIFICATION_FULL_UUID = UUID.fromString("a71467a6-74dc-4148-9530-484628a5ab0e");
private static final UUID UUID_ABIES = UUID.fromString("19f560d9-a555-4883-9c54-39d04872307c");
private static final UUID UUID_PINACEAE = UUID.fromString("74216ed8-5f04-439e-87e0-500738f5e7fc");
-
+ private static final UUID UUID_ABIES_NODE = UUID.fromString("56b10cf0-9522-407e-9f90-0c2dba263c94");
+ private static final UUID UUID_FLAT_ROOT = UUID.fromString("75202d4e-b2aa-4343-8b78-340a52d15c40");
@Before
public void setUp() {
checkPreconditions();
- Classification classification = classificationDao.load(UUID.fromString(CLASSIFICATION_UUID));
+ Classification classification = classificationDao.load(FLAT_CLASSIFICATION_UUID);
includeUnpublished = true;
// test for the bug in http://dev.e-taxonomy.eu/trac/ticket/2778
Rank rank = Rank.GENUS();
// run both queries in dao method since rank != null
- List<TaxonNode> rootNodes = classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished,
+ List<TaxonNode> rootNodes = classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished,
null, null, null, 0);
- rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished,
+ rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished,
null, null, null, 1));
assertEquals(3, rootNodes.size());
rank = null;
// run only fist query in dao method since rank == null
- rootNodes = classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished,
+ rootNodes = classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished,
null, null, null, 0);
assertEquals("The absolut root nodes should be returned", 3, rootNodes.size());
rank = Rank.GENUS();
// run both queries in dao method since rank != null
- rootNodes = classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished,
+ rootNodes = classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished,
null, null, null, 0);
- rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished,
+ rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished,
null, null, null, 1));
assertEquals(2, rootNodes.size()); //5002 in unpublished
rank = null;
- rootNodes = classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished,
+ rootNodes = classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished,
null, null, null, 0);
assertEquals("The absolut root nodes should be returned", 2, rootNodes.size());
}
// check preconditions
checkPreconditions();
- Classification classification = classificationDao.load(UUID.fromString(CLASSIFICATION_FULL_UUID));
+ Classification classification = classificationDao.load(CLASSIFICATION_FULL_UUID);
includeUnpublished = false;
Rank rank = Rank.GENUS();
// run both queries in dao method since rank != null
- List<TaxonNode> rootNodes = classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, null, 0);
- rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, null, 1));
+ List<TaxonNode> rootNodes = classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 0);
+ rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 1));
assertEquals("Only the genus should come back", 1, rootNodes.size());
assertEquals(Rank.GENUS(), rootNodes.get(0).getTaxon().getName().getRank());
assertEquals(UUID_ABIES, rootNodes.get(0).getTaxon().getUuid());
rank = Rank.SUBGENUS();
// run both queries in dao method since rank != null
includeUnpublished = true;
- rootNodes = classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, null, 0);
- rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, null, 1));
+ rootNodes = classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 0);
+ rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 1));
assertEquals("Only the 2 species should come back", 2, rootNodes.size());
for (TaxonNode tn : rootNodes){
assertEquals(Rank.SPECIES(), tn.getTaxon().getName().getRank());
}
// run both queries in dao method since rank != null
includeUnpublished = false;
- rootNodes = classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, null, 0);
- rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, null, 1));
+ rootNodes = classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 0);
+ rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 1));
assertEquals("Only the 1 published species should come back", 1, rootNodes.size());
for (TaxonNode tn : rootNodes){
assertEquals(Rank.SPECIES(), tn.getTaxon().getName().getRank());
rank = Rank.SUBFAMILY();
// run both queries in dao method since rank != null
- rootNodes = classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, null, 0);
- rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, null, 1));
+ rootNodes = classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 0);
+ rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 1));
assertEquals("Only the genus should come back", 1, rootNodes.size());
assertEquals(Rank.GENUS(), rootNodes.get(0).getTaxon().getName().getRank());
assertEquals(UUID_ABIES, rootNodes.get(0).getTaxon().getUuid());
rank = Rank.FAMILY();
// run both queries in dao method since rank != null
- rootNodes = classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, null, 0);
- rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, null, 1));
+ rootNodes = classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 0);
+ rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 1));
assertEquals("Only the family should come back", 1, rootNodes.size());
assertEquals(Rank.FAMILY(), rootNodes.get(0).getTaxon().getName().getRank());
assertEquals(UUID_PINACEAE, rootNodes.get(0).getTaxon().getUuid());
rank = null;
// run only fist query in dao method since rank == null
- rootNodes = classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, null, 0);
+ rootNodes = classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 0);
assertEquals("Only the family as the absolut root node should come back", 1, rootNodes.size());
assertEquals(Rank.FAMILY(), rootNodes.get(0).getTaxon().getName().getRank());
assertEquals(UUID_PINACEAE, rootNodes.get(0).getTaxon().getUuid());
}
+ /**
+ * Test listRankSpecificRootNode with an existing classification
+ */
+ @Test
+ @DataSet(value="ClassificationDaoHibernateImplTest.listRankSpecificRootNodes.xml")
+ public void testListRankSpecificRootNodesWithHierarchie_withSubtree() {
+
+ // check preconditions
+ checkPreconditions();
+
+ Classification classification = classificationDao.load(CLASSIFICATION_FULL_UUID);
+ TaxonNode subtree = taxonNodeDao.findByUuid(UUID_ABIES_NODE);
+
+ includeUnpublished = false;
+ Rank rank = null;
+ // run only first query as rank is null
+ List<TaxonNode> rootNodes = classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 0);
+ assertEquals("Only 1 node - the Abies node - should come back as root node for the subtree", 1, rootNodes.size());
+ assertEquals(Rank.GENUS(), rootNodes.get(0).getTaxon().getName().getRank());
+ assertEquals(UUID_ABIES, rootNodes.get(0).getTaxon().getUuid());
+
+ rank = Rank.GENUS();
+ // run both queries in dao method since rank != null
+ rootNodes = classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 0);
+ rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 1));
+ assertEquals("Only 1 node - the Abies node - should come back", 1, rootNodes.size());
+ assertEquals(Rank.GENUS(), rootNodes.get(0).getTaxon().getName().getRank());
+ assertEquals(UUID_ABIES, rootNodes.get(0).getTaxon().getUuid());
+
+
+ rank = Rank.SUBGENUS();
+ // run both queries in dao method since rank != null
+ includeUnpublished = true;
+ rootNodes = classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 0);
+ rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 1));
+ assertEquals("Only the 2 species should come back", 2, rootNodes.size());
+ for (TaxonNode tn : rootNodes){
+ assertEquals(Rank.SPECIES(), tn.getTaxon().getName().getRank());
+ }
+ // same with unpublished
+ includeUnpublished = false;
+ rootNodes = classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 0);
+ rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 1));
+ assertEquals("Only the 1 published species should come back", 1, rootNodes.size());
+ for (TaxonNode tn : rootNodes){
+ assertEquals(Rank.SPECIES(), tn.getTaxon().getName().getRank());
+ }
+
+ rank = Rank.SUBFAMILY();
+ // run both queries in dao method since rank != null
+ rootNodes = classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 0);
+ rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 1));
+ assertEquals("Only the genus should come back", 1, rootNodes.size());
+ assertEquals(Rank.GENUS(), rootNodes.get(0).getTaxon().getName().getRank());
+ assertEquals(UUID_ABIES, rootNodes.get(0).getTaxon().getUuid());
+
+ rank = Rank.FAMILY();
+ // run both queries in dao method since rank != null
+ rootNodes = classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 0);
+ rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 1));
+ assertEquals("Only the genus should come back as family is not in subtree", 1, rootNodes.size());
+ assertEquals(Rank.GENUS(), rootNodes.get(0).getTaxon().getName().getRank());
+ assertEquals(UUID_ABIES, rootNodes.get(0).getTaxon().getUuid());
+
+ //no classification filter
+ //should have no effect as subtree is kind of classification filter
+ TaxonNode rootNode = classification.getRootNode();
+ classification = null;
+ rank = null;
+ rootNodes = classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 0);
+ assertEquals("Only 1 node - the Abies node - should come back", 1, rootNodes.size());
+ assertEquals(UUID_ABIES, rootNodes.get(0).getTaxon().getUuid());
+
+ rank = Rank.GENUS();
+ rootNodes = classificationDao.listRankSpecificRootNodes(null, subtree, rank, includeUnpublished, null, null, null, 0);
+ rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 1));
+ assertEquals("Only 1 node - the Abies node - should come back", 1, rootNodes.size());
+ assertEquals(UUID_ABIES, rootNodes.get(0).getTaxon().getUuid());
+
+ rank = Rank.SUBGENUS();
+ includeUnpublished = true;
+ rootNodes = classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 0);
+ rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 1));
+ assertEquals("Only the 2 species should come back", 2, rootNodes.size());
+
+ //with root node
+ subtree = rootNode;
+ rank = null;
+ rootNodes = classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 0);
+ assertEquals("Only the family should come back", 1, rootNodes.size());
+ assertEquals(Rank.FAMILY(), rootNodes.get(0).getTaxon().getName().getRank());
+ assertEquals(UUID_PINACEAE, rootNodes.get(0).getTaxon().getUuid());
+
+ rank = Rank.GENUS();
+ rootNodes = classificationDao.listRankSpecificRootNodes(null, subtree, rank, includeUnpublished, null, null, null, 0);
+ rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 1));
+ assertEquals("Only 1 node - the Abies node - should come back", 1, rootNodes.size());
+ assertEquals(UUID_ABIES, rootNodes.get(0).getTaxon().getUuid());
+
+ rank = Rank.SUBGENUS();
+ includeUnpublished = true;
+ rootNodes = classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 0);
+ rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 1));
+ assertEquals("Only the 2 species should come back", 2, rootNodes.size());
+
+
+ //flat hierarchie
+ classification = classificationDao.load(FLAT_CLASSIFICATION_UUID);
+ includeUnpublished = false;
+
+ rank = null;
+ rootNodes = classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 0);
+ assertEquals("No subtree should be returned as subtree is not from classification", 0, rootNodes.size());
+
+ subtree = taxonNodeDao.findByUuid(UUID_FLAT_ROOT);
+ rootNodes = classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 0);
+// assertEquals("The 2 published species should be returned", 2, rootNodes.size());
+
+ rank = Rank.GENUS();
+ rootNodes = classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 0);
+ rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, null, 1));
+ assertEquals(2, rootNodes.size()); //5002 in unpublished
+
+ }
+
/**
* Test listRankSpecificRootNode with all classifications
*/
Rank rank = Rank.GENUS();
// run both queries in dao method since rank != null
- List<TaxonNode> rootNodes = classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, null, 0);
- rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, null, 1));
+ List<TaxonNode> rootNodes = classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 0);
+ rootNodes.addAll(classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 1));
assertEquals("3 Species from no hierarchie and 1 genus from hierarchie should return", 4, rootNodes.size());
rank = null;
// run only fist query in dao method since rank == null
- rootNodes = classificationDao.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, null, 0);
+ rootNodes = classificationDao.listRankSpecificRootNodes(classification, null, rank, includeUnpublished, null, null, null, 0);
assertEquals("4 taxa should return (3 species from no hierarchie, 1 family, from hierarchie classification", 4, rootNodes.size());
}
@DataSet(value="ClassificationDaoHibernateImplTest.listRankSpecificRootNodes.xml")
public void testClassificationLookup() {
- Classification classification = classificationDao.load(UUID.fromString(CLASSIFICATION_FULL_UUID));
+ Classification classification = classificationDao.load(CLASSIFICATION_FULL_UUID);
ClassificationLookupDTO classificationLookupDto = classificationDao.classificationLookup(classification);
assertEquals(4, classificationLookupDto.getTaxonIds().size());
}
// 1. create the entities and save them
Classification flatHierarchieClassification = Classification.NewInstance("European Abies");
- flatHierarchieClassification.setUuid(UUID.fromString(CLASSIFICATION_UUID));
+ flatHierarchieClassification.setUuid(FLAT_CLASSIFICATION_UUID);
classificationDao.save(flatHierarchieClassification);
Reference sec = ReferenceFactory.newBook();
// 1. create the entities and save them
Classification fullHierarchieClassification = Classification.NewInstance("European Abies full hierarchie");
- fullHierarchieClassification.setUuid(UUID.fromString(CLASSIFICATION_FULL_UUID));
+ fullHierarchieClassification.setUuid(CLASSIFICATION_FULL_UUID);
classificationDao.save(fullHierarchieClassification);
fullHierarchieClassification.addParentChild(t_pinaceae, t_abies, null, null);
import eu.etaxonomy.cdm.persistence.dao.reference.IReferenceDao;
import eu.etaxonomy.cdm.persistence.dao.taxon.IClassificationDao;
import eu.etaxonomy.cdm.persistence.dao.taxon.ITaxonDao;
+import eu.etaxonomy.cdm.persistence.dao.taxon.ITaxonNodeDao;
import eu.etaxonomy.cdm.persistence.dto.UuidAndTitleCache;
import eu.etaxonomy.cdm.persistence.query.GroupByCount;
import eu.etaxonomy.cdm.persistence.query.GroupByDate;
@SpringBeanByType
private ITaxonDao taxonDao;
+ @SpringBeanByType
+ private ITaxonNodeDao taxonNodeDao;
+
@SpringBeanByType
private IClassificationDao classificationDao;
@SpringBeanByType
private IDefinedTermDao definedTermDao;
- private UUID uuid;
- private UUID sphingidae;
- private UUID acherontia;
- private UUID rethera;
- private UUID retheraSecCdmtest;
- private UUID atroposAgassiz; // a Synonym
- private UUID atroposOken; // a Synonym
- private UUID atroposLeach; // a Synonym
- private UUID acherontiaLachesis;
- private UUID aus;
+ private UUID uuid = UUID.fromString("496b1325-be50-4b0a-9aa2-3ecd610215f2");
+ private UUID sphingidae = UUID.fromString("54e767ee-894e-4540-a758-f906ecb4e2d9");
+ private UUID acherontia = UUID.fromString("c5cc8674-4242-49a4-aada-72d63194f5fa");
+ private UUID rethera = UUID.fromString("a9f42927-e507-4fda-9629-62073a908aae");
+ private UUID retheraSecCdmtest = UUID.fromString("a9f42927-e507-4fda-9629-62073a908aae");
+ private UUID atroposAgassiz = UUID.fromString("d75b2e3d-7394-4ada-b6a5-93175b8751c1"); // a Synonym
+ private UUID atroposOken = UUID.fromString("6bfedf25-6dbc-4d5c-9d56-84f9052f3b2a"); // a Synonym
+ private UUID atroposLeach = UUID.fromString("3da4ab34-6c50-4586-801e-732615899b07"); // a Synonym
+ private UUID acherontiaLachesis = UUID.fromString("b04cc9cb-2b4a-4cc4-a94a-3c93a2158b06");
+ private UUID aus = UUID.fromString("496b1325-be50-4b0a-9aa2-3ecd610215f2");
+
+ private UUID UUID_ACHERONTIA_NODE = UUID.fromString("56b10cf0-9522-407e-9f90-0c2dba263c94");
+ private UUID UUID_CLASSIFICATION2 = UUID.fromString("a71467a6-74dc-4148-9530-484628a5ab0e");
private AuditEvent previousAuditEvent;
private AuditEvent mostRecentAuditEvent;
@Before
public void setUp() {
- uuid = UUID.fromString("496b1325-be50-4b0a-9aa2-3ecd610215f2");
- sphingidae = UUID.fromString("54e767ee-894e-4540-a758-f906ecb4e2d9");
- acherontia = UUID.fromString("c5cc8674-4242-49a4-aada-72d63194f5fa");
- acherontiaLachesis = UUID.fromString("b04cc9cb-2b4a-4cc4-a94a-3c93a2158b06");
- atroposAgassiz = UUID.fromString("d75b2e3d-7394-4ada-b6a5-93175b8751c1");
- atroposOken = UUID.fromString("6bfedf25-6dbc-4d5c-9d56-84f9052f3b2a");
- atroposLeach = UUID.fromString("3da4ab34-6c50-4586-801e-732615899b07");
- rethera = UUID.fromString("a9f42927-e507-4fda-9629-62073a908aae");
- retheraSecCdmtest = UUID.fromString("a9f42927-e507-4fda-9629-62073a908aae");
- aus = UUID.fromString("496b1325-be50-4b0a-9aa2-3ecd610215f2");
previousAuditEvent = new AuditEvent();
previousAuditEvent.setRevisionNumber(1025);
public void testGetTaxaByNameWithMisappliedNames(){
Classification classification = classificationDao.load(classificationUuid);
-
+ TaxonNode subtree = null;
/* NOTE:
* The testdata contains 3 misapplied names (1. nameCache = Aus, 2. nameCache = Rethera, 3. nameCache = Daphnis),
* two contained in the classification used in this test,
*/
//two accepted taxa starting with R in classification "TestBaum"
@SuppressWarnings("rawtypes")
- List<TaxonBase> results = taxonDao.getTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, "R*", classification, MatchMode.BEGINNING,
+ List<TaxonBase> results = taxonDao.getTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, "R*", classification, subtree, MatchMode.BEGINNING,
null, includeUnpublished, null, null, null, null);
Assert.assertEquals("There should be 2 Taxa", 2, results.size());
//three taxa, 2 accepted and 1 misapplied name starting with R
- results = taxonDao.getTaxaByName(doTaxa, noSynonyms, doMisapplied, noCommonNames, false, "R*", null, MatchMode.BEGINNING,
+ results = taxonDao.getTaxaByName(doTaxa, noSynonyms, doMisapplied, noCommonNames, false, "R*", null, subtree, MatchMode.BEGINNING,
null, includeUnpublished, null, null, null, null);
Assert.assertEquals("There should be 3 Taxa", 3, results.size());
//one synonym has no accepted taxon
- results = taxonDao.getTaxaByName(doTaxa, doSynonyms, doMisapplied, noCommonNames, false, "A*", null, MatchMode.BEGINNING,
+ results = taxonDao.getTaxaByName(doTaxa, doSynonyms, doMisapplied, noCommonNames, false, "A*", null, subtree, MatchMode.BEGINNING,
null, includeUnpublished, null, null, null, null);
Assert.assertEquals("There should be 11 Taxa",11, results.size());
//two accepted taxa in classification and 1 misapplied name with accepted name in classification
- results = taxonDao.getTaxaByName(doTaxa, doSynonyms, doMisapplied, noCommonNames, false, "R*", classification, MatchMode.BEGINNING,
+ results = taxonDao.getTaxaByName(doTaxa, doSynonyms, doMisapplied, noCommonNames, false, "R*", classification, subtree, MatchMode.BEGINNING,
null, includeUnpublished, null, null, null, null);
Assert.assertEquals("There should be 3 Taxa", 3, results.size());
//same with unpublished
includeUnpublished = false;
- results = taxonDao.getTaxaByName(doTaxa, doSynonyms, doMisapplied, noCommonNames, false, "R*", classification, MatchMode.BEGINNING,
+ results = taxonDao.getTaxaByName(doTaxa, doSynonyms, doMisapplied, noCommonNames, false, "R*", classification, subtree, MatchMode.BEGINNING,
null, includeUnpublished, null, null, null, null);
Assert.assertEquals("There should be 3 Taxa", 3, results.size());
includeUnpublished = true;
//same as above because all taxa, synonyms and misapplied names starting with R are in the classification
- results = taxonDao.getTaxaByName(doTaxa, doSynonyms, doMisapplied, noCommonNames, false, "R*", null, MatchMode.BEGINNING,
+ results = taxonDao.getTaxaByName(doTaxa, doSynonyms, doMisapplied, noCommonNames, false, "R*", null, subtree, MatchMode.BEGINNING,
null, includeUnpublished, null, null, null, null);
Assert.assertEquals("There should be 3 Taxa", 3, results.size());
//find misapplied names with accepted taxon in the classification, the accepted taxa of two misapplied names are in the classification
- results = taxonDao.getTaxaByName(noTaxa, noSynonyms, doMisapplied, noCommonNames, false, "*", classification, MatchMode.BEGINNING,
+ results = taxonDao.getTaxaByName(noTaxa, noSynonyms, doMisapplied, noCommonNames, false, "*", classification, subtree, MatchMode.BEGINNING,
null, includeUnpublished, null, null, null, null);
Assert.assertEquals("There should be 2 Taxa", 2, results.size());
//find misapplied names beginning with R
- results = taxonDao.getTaxaByName(noTaxa, noSynonyms, doMisapplied, noCommonNames, false, "R*", null, MatchMode.BEGINNING,
+ results = taxonDao.getTaxaByName(noTaxa, noSynonyms, doMisapplied, noCommonNames, false, "R*", null, subtree, MatchMode.BEGINNING,
null, includeUnpublished, null, null, null, null);
Assert.assertEquals("There should be 1 Taxa", 1, results.size());
//find all three misapplied names
- results = taxonDao.getTaxaByName(noTaxa, noSynonyms, doMisapplied, noCommonNames, false, "*", null, MatchMode.BEGINNING,
+ results = taxonDao.getTaxaByName(noTaxa, noSynonyms, doMisapplied, noCommonNames, false, "*", null, subtree, MatchMode.BEGINNING,
null, includeUnpublished, null, null, null, null);
Assert.assertEquals("There should be 3 Taxa", 3, results.size());
@Test
@DataSet (loadStrategy=CleanSweepInsertLoadStrategy.class, value="TaxonDaoHibernateImplTest.testGetTaxaByNameAndArea.xml")
public void testGetTaxaByNameVariants(){
+ TaxonNode subtree = null;
@SuppressWarnings("rawtypes")
- List<TaxonBase> results = taxonDao.getTaxaByName(noTaxa, noSynonyms, noMisapplied, doCommonNames, false, "c*", null, MatchMode.BEGINNING,
+ List<TaxonBase> results = taxonDao.getTaxaByName(noTaxa, noSynonyms, noMisapplied, doCommonNames, false, "c*", null, subtree, MatchMode.BEGINNING,
null, includeUnpublished, null, null, null, null);
Assert.assertEquals("There should be 2 Taxa",2, results.size());
- results = taxonDao.getTaxaByName(noTaxa, noSynonyms, doMisapplied, doCommonNames, false, "R*", null, MatchMode.BEGINNING,
+ results = taxonDao.getTaxaByName(noTaxa, noSynonyms, doMisapplied, doCommonNames, false, "R*", null, subtree, MatchMode.BEGINNING,
null, includeUnpublished, null, null, null, null);
Assert.assertEquals("There should be 1 Taxa", 1, results.size());
- results = taxonDao.getTaxaByName(noTaxa, doSynonyms, doMisapplied, doCommonNames, false, "R*", null, MatchMode.BEGINNING,
+ results = taxonDao.getTaxaByName(noTaxa, doSynonyms, doMisapplied, doCommonNames, false, "R*", null, subtree, MatchMode.BEGINNING,
null, includeUnpublished, null, null, null, null);
Assert.assertEquals("There should be 1 Taxa", 1, results.size());
- results = taxonDao.getTaxaByName(noTaxa, doSynonyms, noMisapplied, doCommonNames, false, "c*", null, MatchMode.BEGINNING,
+ results = taxonDao.getTaxaByName(noTaxa, doSynonyms, noMisapplied, doCommonNames, false, "c*", null, subtree, MatchMode.BEGINNING,
null, includeUnpublished, null, null, null, null);
Assert.assertEquals("There should be 2 Taxa", 2, results.size());
- results = taxonDao.getTaxaByName(doTaxa, noSynonyms, noMisapplied, doCommonNames, false, "c*", null, MatchMode.BEGINNING,
+ results = taxonDao.getTaxaByName(doTaxa, noSynonyms, noMisapplied, doCommonNames, false, "c*", null, subtree, MatchMode.BEGINNING,
null, includeUnpublished, null, null, null, null);
Assert.assertEquals("There should be 2 Taxa", 2, results.size());
Classification classification = classificationDao.load(classificationUuid);
- results = taxonDao.getTaxaByName(noTaxa, noSynonyms, noMisapplied, doCommonNames, false, "c*", classification, MatchMode.BEGINNING,
+ results = taxonDao.getTaxaByName(noTaxa, noSynonyms, noMisapplied, doCommonNames, false, "c*", classification, subtree, MatchMode.BEGINNING,
null, includeUnpublished, null, null, null, null);
Assert.assertEquals("There should be 1 Taxa", 1, results.size());
Set<NamedArea> namedAreas = new HashSet<>();
namedAreas.add((NamedArea)definedTermDao.load(southernAmericaUuid));
- results = taxonDao.getTaxaByName(noTaxa, noSynonyms, noMisapplied, doCommonNames, false, "c*", null, MatchMode.BEGINNING,
+ results = taxonDao.getTaxaByName(noTaxa, noSynonyms, noMisapplied, doCommonNames, false, "c*", null, subtree, MatchMode.BEGINNING,
namedAreas, includeUnpublished, null, null, null, null);
Assert.assertEquals("There should be 1 Taxa", 1, results.size());
}
@Test
@DataSet
public void testGetTaxaByNameForEditor() {
+ TaxonNode subtree = null;
Reference sec = referenceDao.findById(1);
assert sec != null : "sec must exist";
@SuppressWarnings("rawtypes")
List<UuidAndTitleCache<? extends IdentifiableEntity>> results = taxonDao.getTaxaByNameForEditor(
- doTaxa, doSynonyms, noMisapplied, noCommonNames, false, includeUnpublished, "Acher", null, MatchMode.BEGINNING, null, null);
+ doTaxa, doSynonyms, noMisapplied, noCommonNames, false, includeUnpublished, "Acher", null, subtree, MatchMode.BEGINNING, null, null);
assertNotNull("getTaxaByName should return a List", results);
assertFalse("The list should not be empty", results.isEmpty());
assertEquals(4, results.size());
-
- results = taxonDao.getTaxaByNameForEditor(doTaxa, doSynonyms, noMisapplied, noCommonNames, false,includeUnpublished, "A",null, MatchMode.BEGINNING, null, null);
+ results = taxonDao.getTaxaByNameForEditor(doTaxa, doSynonyms, noMisapplied, noCommonNames, false,includeUnpublished, "A", null, subtree, MatchMode.BEGINNING, null, null);
assertNotNull("getTaxaByName should return a List", results);
assertEquals(7, results.size());
-
- results = taxonDao.getTaxaByNameForEditor(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, includeUnpublished, "A", null, MatchMode.BEGINNING, null, null);
+ results = taxonDao.getTaxaByNameForEditor(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, includeUnpublished, "A", null, subtree, MatchMode.BEGINNING, null, null);
assertNotNull("getTaxaByName should return a List", results);
assertEquals(5, results.size());
assertEquals(results.get(0).getType(), Taxon.class);
- results = taxonDao.getTaxaByNameForEditor(noTaxa, doSynonyms, noMisapplied, noCommonNames, false, includeUnpublished, "A", null, MatchMode.BEGINNING, null, null);
+ results = taxonDao.getTaxaByNameForEditor(noTaxa, doSynonyms, noMisapplied, noCommonNames, false, includeUnpublished, "A", null, subtree, MatchMode.BEGINNING, null, null);
assertNotNull("getTaxaByName should return a List", results);
assertEquals(2, results.size());
assertEquals(results.get(0).getType(), Synonym.class);
- results = taxonDao.getTaxaByNameForEditor(doTaxa, doSynonyms, noMisapplied, noCommonNames, false,includeUnpublished, "Aus", null, MatchMode.EXACT, null, null);
+ results = taxonDao.getTaxaByNameForEditor(doTaxa, doSynonyms, noMisapplied, noCommonNames, false,includeUnpublished, "Aus", null, subtree, MatchMode.EXACT, null, null);
assertNotNull("getTaxaByName should return a List", results);
assertEquals("Results list should contain one entity",1,results.size());
- results = taxonDao.getTaxaByNameForEditor(doTaxa, doSynonyms, doMisapplied, noCommonNames, false, includeUnpublished, "A", null, MatchMode.BEGINNING, null, null);
+ results = taxonDao.getTaxaByNameForEditor(doTaxa, doSynonyms, doMisapplied, noCommonNames, false, includeUnpublished, "A", null, subtree, MatchMode.BEGINNING, null, null);
assertNotNull("getTaxaByName should return a List", results);
assertEquals("Results list should contain one entity", 8, results.size());
@Test
@DataSet(loadStrategy=CleanSweepInsertLoadStrategy.class, value="TaxonDaoHibernateImplTest.testGetTaxaByNameAndArea.xml")
public void testGetTaxaByNameAndArea() {
-
+ TaxonNode subtree = null;
Set<NamedArea> namedAreas = new HashSet<>();
namedAreas.add((NamedArea)definedTermDao.load(northernAmericaUuid));
//namedAreas.add((NamedArea)definedTermDao.load(southernAmericaUuid));
//namedAreas.add((NamedArea)definedTermDao.load(antarcticaUuid));
- Classification taxonmicTree = classificationDao.findByUuid(classificationUuid);
+ Classification classification = classificationDao.findByUuid(classificationUuid);
// prepare some synonym relation ships for some tests
Synonym synAtroposAgassiz = (Synonym)taxonDao.findByUuid(atroposAgassiz);
//long numberOfTaxa = taxonDao.countTaxaByName(Taxon.class, "Rethera", null, MatchMode.BEGINNING, namedAreas);
@SuppressWarnings("rawtypes")
- List<TaxonBase> results = taxonDao.getTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, "Rethera", null, MatchMode.BEGINNING, namedAreas,
+ List<TaxonBase> results = taxonDao.getTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, "Rethera", null, subtree, MatchMode.BEGINNING, namedAreas,
includeUnpublished, null, null, null, null);
assertNotNull("getTaxaByName should return a List", results);
assertTrue("expected to find two taxa but found "+results.size(), results.size() == 2);
- results = taxonDao.getTaxaByName(noTaxa, noSynonyms, noMisapplied, doCommonNames, false, "com*", null, MatchMode.BEGINNING, namedAreas,
+ results = taxonDao.getTaxaByName(noTaxa, noSynonyms, noMisapplied, doCommonNames, false, "com*", null, subtree, MatchMode.BEGINNING, namedAreas,
includeUnpublished, null, null, null, null);
assertNotNull("getTaxaByName should return a List", results);
assertTrue("expected to find one taxon but found "+results.size(), results.size() == 1);
// 2. searching for a taxon (Rethera) contained in a specific classification
- results = taxonDao.getTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, "Rethera", taxonmicTree, MatchMode.BEGINNING, namedAreas,
+ results = taxonDao.getTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, "Rethera", classification, subtree, MatchMode.BEGINNING, namedAreas,
includeUnpublished, null, null, null, null);
assertNotNull("getTaxaByName should return a List", results);
assertTrue("expected to find one taxon but found "+results.size(), results.size() == 1);
// 3. searching for Synonyms
- results = taxonDao.getTaxaByName(noTaxa, doSynonyms, noMisapplied, noCommonNames, false, "Atropo", null, MatchMode.ANYWHERE, null,
+ results = taxonDao.getTaxaByName(noTaxa, doSynonyms, noMisapplied, noCommonNames, false, "Atropo", null, subtree, MatchMode.ANYWHERE, null,
includeUnpublished, null, null, null, null);
assertNotNull("getTaxaByName should return a List", results);
/*System.err.println(results.get(0).getTitleCache() + " - " +results.get(1).getTitleCache() + " - " +results.get(2).getTitleCache() );
assertTrue("expected to find three taxa but found "+results.size(), results.size() == 3);
// 4. searching for Synonyms
- results = taxonDao.getTaxaByName(noTaxa, doSynonyms, noMisapplied, noCommonNames, false,"Atropo", null, MatchMode.BEGINNING, null,
+ results = taxonDao.getTaxaByName(noTaxa, doSynonyms, noMisapplied, noCommonNames, false,"Atropo", null, subtree, MatchMode.BEGINNING, null,
includeUnpublished, null, null, null, null);
assertNotNull("getTaxaByName should return a List", results);
assertTrue("expected to find three taxa but found "+results.size(), results.size() == 3);
// 5. searching for a Synonyms and Taxa
- results = taxonDao.getTaxaByName(doTaxa, doSynonyms, noMisapplied, noCommonNames, false,"A", null, MatchMode.BEGINNING, namedAreas,
+ results = taxonDao.getTaxaByName(doTaxa, doSynonyms, noMisapplied, noCommonNames, false,"A", null, subtree, MatchMode.BEGINNING, namedAreas,
includeUnpublished, null, null, null, null);
//only five taxa have a distribution
assertNotNull("getTaxaByName should return a List", results);
/**
- * Test method for {@link eu.etaxonomy.cdm.persistence.dao.hibernate.taxon.TaxonDaoHibernateImpl#findByNameTitleCache(Class<? extends TaxonBase>clazz, String queryString, Classification classification, MatchMode matchMode, Set<NamedArea> namedAreas, Integer pageNumber, Integer pageSize, List<String> propertyPaths)}
+ * Test method for {@link eu.etaxonomy.cdm.persistence.dao.hibernate.taxon.TaxonDaoHibernateImpl#findByNameTitleCache(Class<? extends TaxonBase>clazz, String queryString, Classification classification, TaxonNode subtree, MatchMode matchMode, Set<NamedArea> namedAreas, Integer pageNumber, Integer pageSize, List<String> propertyPaths)}
* restricting the search by a set of Areas.
*/
@Test
@DataSet(loadStrategy=CleanSweepInsertLoadStrategy.class, value="TaxonDaoHibernateImplTest.testGetTaxaByNameAndArea.xml")
public void testFindByNameTitleCache() {
+ TaxonNode subtree = null;
Set<NamedArea> namedAreas = new HashSet<>();
namedAreas.add((NamedArea)definedTermDao.load(northernAmericaUuid));
Synonym synAtroposAgassiz = (Synonym)taxonDao.findByUuid(atroposAgassiz);
Taxon taxonRethera = (Taxon)taxonDao.findByUuid(rethera);
taxonRethera.addSynonym(synAtroposAgassiz, SynonymType.SYNONYM_OF());
- logger.warn("addSynonym(..)");
+ //logger.warn("addSynonym(..)");
this.taxonDao.clear();
Synonym synAtroposLeach = (Synonym)taxonDao.findByUuid(atroposLeach);
Taxon taxonRetheraSecCdmtest = (Taxon)taxonDao.findByUuid(retheraSecCdmtest);
taxonRetheraSecCdmtest.addSynonym(synAtroposLeach, SynonymType.SYNONYM_OF());
this.taxonDao.clear();
+
// 1. searching for a taxon (Rethera)
//long numberOfTaxa = taxonDao.countTaxaByName(Taxon.class, "Rethera", null, MatchMode.BEGINNING, namedAreas);
@SuppressWarnings("rawtypes")
- List<TaxonBase> results = taxonDao.findByNameTitleCache(true, false, includeUnpublished, "Rethera Rothschild & Jordan, 1903", null, MatchMode.EXACT, namedAreas,
+ List<TaxonBase> results = taxonDao.findByNameTitleCache(doTaxa, noSynonyms, includeUnpublished, "Rethera Rothschild & Jordan, 1903", null, subtree, MatchMode.EXACT, namedAreas,
null, null, null, null);
assertNotNull("getTaxaByName should return a List", results);
assertTrue("expected to find two taxa but found "+results.size(), results.size() == 2);
// 2. searching for a taxon (Rethera) contained in a specific classification
- results = taxonDao.findByNameTitleCache(true, false, includeUnpublished, "Rethera Rothschild & Jordan, 1903", classification, MatchMode.EXACT, namedAreas,
+ results = taxonDao.findByNameTitleCache(doTaxa, noSynonyms, includeUnpublished, "Rethera Rothschild & Jordan, 1903", classification, subtree, MatchMode.EXACT, namedAreas,
null, null, null, null);
assertNotNull("getTaxaByName should return a List", results);
assertTrue("expected to find one taxon but found "+results.size(), results.size() == 1);
-
// 3. searching for Synonyms
- results = taxonDao.findByNameTitleCache(false, true, includeUnpublished, "*Atropo", null, MatchMode.ANYWHERE, null,
+ results = taxonDao.findByNameTitleCache(noTaxa, doSynonyms, includeUnpublished, "*Atropo", null, subtree, MatchMode.ANYWHERE, null,
null, null, null, null);
assertNotNull("getTaxaByName should return a List", results);
assertTrue("expected to find two taxa but found "+results.size(), results.size() == 2);
// 4. searching for Synonyms
- results = taxonDao.findByNameTitleCache(false, true, includeUnpublished, "Atropo", null, MatchMode.BEGINNING, null,
+ results = taxonDao.findByNameTitleCache(noTaxa, doSynonyms, includeUnpublished, "Atropo", null, subtree, MatchMode.BEGINNING, null,
null, null, null, null);
assertNotNull("getTaxaByName should return a List", results);
assertTrue("expected to find two taxa but found "+results.size(), results.size() == 2);
-
// 5. searching for a Synonyms and Taxa
// attache a synonym first
Synonym syn = (Synonym)taxonDao.findByUuid(this.atroposLeach);
tax.addSynonym(syn, SynonymType.HETEROTYPIC_SYNONYM_OF());
taxonDao.save(tax);
- results = taxonDao.findByNameTitleCache(true, true, includeUnpublished, "A", null, MatchMode.BEGINNING, namedAreas,
+ results = taxonDao.findByNameTitleCache(doTaxa, doSynonyms, includeUnpublished, "A", null, subtree, MatchMode.BEGINNING, namedAreas,
null, null, null, null);
assertNotNull("getTaxaByName should return a List", results);
assertTrue("expected to find 8 taxa but found "+results.size(), results.size() == 8);
@Test
@DataSet(loadStrategy=CleanSweepInsertLoadStrategy.class, value="TaxonDaoHibernateImplTest.testGetTaxaByNameAndArea.xml")
public void testTaxonNameInTwoClassifications(){
+ TaxonNode subtree = null;
List<String> propertyPaths = new ArrayList<>();
propertyPaths.add("taxonNodes");
@SuppressWarnings("rawtypes")
- List<TaxonBase> taxa = taxonDao.getTaxaByName(doTaxa, doSynonyms, noMisapplied, noCommonNames, false,"P", null, MatchMode.BEGINNING,
+ List<TaxonBase> taxa = taxonDao.getTaxaByName(doTaxa, doSynonyms, noMisapplied, noCommonNames, false,"P", null, subtree, MatchMode.BEGINNING,
null, includeUnpublished, null, null, null, null);
Taxon taxon = (Taxon)taxa.get(0);
Set<TaxonNode> nodes = taxon.getTaxonNodes();
@Test
@DataSet(loadStrategy=CleanSweepInsertLoadStrategy.class, value="TaxonDaoHibernateImplTest.testGetTaxaByNameAndArea.xml")
public void testGetTaxaByNameProParteSynonym(){
-
+ TaxonNode subtree = null;
@SuppressWarnings("rawtypes")
- List<TaxonBase> taxa = taxonDao.getTaxaByName(noTaxa, doSynonyms, noMisapplied, noCommonNames, false, "A", null,
+ List<TaxonBase> taxa = taxonDao.getTaxaByName(noTaxa, doSynonyms, noMisapplied, noCommonNames, false, "A", null,subtree,
MatchMode.BEGINNING, null, includeUnpublished, null, null, null, null);
Assert.assertEquals("2 synonyms and 1 pro parte synonym should be returned.", 3, taxa.size());
assertTrue("Pro parte should exist", existsInCollection(taxa, acherontiaLachesis));
assertTrue("2. normal synonym should exist", existsInCollection(taxa, atroposOken));
//TODO shouldn't we also find orphaned synonyms (without accepted taxon) like Atropos Leach?
- taxa = taxonDao.getTaxaByName(noTaxa, doSynonyms, noMisapplied, noCommonNames, false, "A", null,
+ taxa = taxonDao.getTaxaByName(noTaxa, doSynonyms, noMisapplied, noCommonNames, false, "A", null,subtree,
MatchMode.BEGINNING, null, NO_UNPUBLISHED, null, null, null, null);
Assert.assertEquals("2 synonyms and no pro parte synonym should be returned.", 2, taxa.size());
assertTrue("Normal synonym should exist", existsInCollection(taxa, atroposAgassiz));
assertTrue("2. normal synonym should exist", existsInCollection(taxa, atroposOken));
- taxa = taxonDao.getTaxaByName(noTaxa, noSynonyms, doMisapplied, noCommonNames, false, "A", null,
+ taxa = taxonDao.getTaxaByName(noTaxa, noSynonyms, doMisapplied, noCommonNames, false, "A", null,subtree,
MatchMode.BEGINNING, null, includeUnpublished, null, null, null, null);
Assert.assertEquals("1 misapplied name, no pro parte synonym should be returned.", 1, taxa.size());
assertTrue("Pro parte should exist", existsInCollection(taxa, aus));
@Test
@DataSet
public void testCountTaxaByName() {
- long numberOfTaxa = taxonDao.countTaxaByName(true, false, false, false,false, "A", null, MatchMode.BEGINNING, null, includeUnpublished);
+ TaxonNode subtree = null;
+ Classification classification= null;
+ long numberOfTaxa = taxonDao.countTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames,false, "A", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
assertEquals(5, numberOfTaxa);
- numberOfTaxa = taxonDao.countTaxaByName(true, false, false, false, false,"Smerinthus kindermannii", null, MatchMode.EXACT, null, includeUnpublished);
+ numberOfTaxa = taxonDao.countTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, "S", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
+ assertEquals("Sphingidae, Smerinthus, Smerinthus kindermannii and Sphingonaepiopsis expected", 4, numberOfTaxa);
+ numberOfTaxa = taxonDao.countTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, "Smerinthus kindermannii", classification, subtree, MatchMode.EXACT, null, includeUnpublished);
assertEquals(1, numberOfTaxa);
- numberOfTaxa = taxonDao.countTaxaByName(false, true, false, false, false,"A", null, MatchMode.BEGINNING, null, includeUnpublished);
+ numberOfTaxa = taxonDao.countTaxaByName(noTaxa, doSynonyms, noMisapplied, noCommonNames, false, "A", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
assertEquals(2, numberOfTaxa);
- numberOfTaxa = taxonDao.countTaxaByName(true, true, false, false, false,"A", null, MatchMode.BEGINNING, null, includeUnpublished);
+ numberOfTaxa = taxonDao.countTaxaByName(doTaxa, doSynonyms, noMisapplied, noCommonNames, false, "A", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
assertEquals(7, numberOfTaxa);
- numberOfTaxa = taxonDao.countTaxaByName(true, true, false, false,false, "Aasfwerfwf fffe", null, MatchMode.BEGINNING, null, includeUnpublished);
+ numberOfTaxa = taxonDao.countTaxaByName(doTaxa, doSynonyms, noMisapplied, noCommonNames, false, "Aasfwerfwf fffe", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
+ assertEquals(0, numberOfTaxa);
+
+ subtree = taxonNodeDao.findByUuid(UUID_ACHERONTIA_NODE);
+ numberOfTaxa = taxonDao.countTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, "A", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
+ assertEquals("Acherontia and 2 A. species expected", 3, numberOfTaxa);
+ numberOfTaxa = taxonDao.countTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, "S", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
+ assertEquals("", 0, numberOfTaxa);
+ numberOfTaxa = taxonDao.countTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, "Smerinthus kindermannii", classification, subtree, MatchMode.EXACT, null, includeUnpublished);
+ assertEquals("Smerinthus is not in subtree", 0, numberOfTaxa);
+ numberOfTaxa = taxonDao.countTaxaByName(noTaxa, doSynonyms, noMisapplied, noCommonNames, false, "A", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
+ assertEquals("Atropos Agassiz and Atropos Oken expected as Synonyms", 2, numberOfTaxa);
+ numberOfTaxa = taxonDao.countTaxaByName(doTaxa, doSynonyms, noMisapplied, noCommonNames, false, "A", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
+ assertEquals("The above accepted and synonyms expected", 5, numberOfTaxa);
+ numberOfTaxa = taxonDao.countTaxaByName(doTaxa, doSynonyms, noMisapplied, noCommonNames, false, "Aasfwerfwf fffe", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
+ assertEquals(0, numberOfTaxa);
+
+ classification = classificationDao.findByUuid(UUID_CLASSIFICATION2);
+ subtree = null;
+ numberOfTaxa = taxonDao.countTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, "A", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
+ assertEquals("Acherontia and 2 A. species expected", 3, numberOfTaxa);
+ numberOfTaxa = taxonDao.countTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, "S", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
+ assertEquals("Sphingidae expected", 1, numberOfTaxa);
+ numberOfTaxa = taxonDao.countTaxaByName(doTaxa, noSynonyms, noMisapplied, noCommonNames, false, "Smerinthus kindermannii", classification, subtree, MatchMode.EXACT, null, includeUnpublished);
+ assertEquals("Smerinthus is not in subtree", 0, numberOfTaxa);
+ numberOfTaxa = taxonDao.countTaxaByName(noTaxa, doSynonyms, noMisapplied, noCommonNames, false, "A", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
+ assertEquals("Atropos Agassiz and Atropos Oken expected as Synonyms", 2, numberOfTaxa);
+ numberOfTaxa = taxonDao.countTaxaByName(doTaxa, doSynonyms, noMisapplied, noCommonNames, false, "A", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
+ assertEquals("The above accepted and synonyms expected", 5, numberOfTaxa);
+ numberOfTaxa = taxonDao.countTaxaByName(doTaxa, doSynonyms, noMisapplied, noCommonNames, false, "Aasfwerfwf fffe", classification, subtree, MatchMode.BEGINNING, null, includeUnpublished);
assertEquals(0, numberOfTaxa);
-// FIXME implement test for search in specific classification
-// Reference reference = referenceDao.findByUuid(UUID.fromString("596b1325-be50-4b0a-9aa2-3ecd610215f2"));
-// numberOfTaxa = taxonDao.countTaxaByName("A*", MatchMode.BEGINNING, SelectMode.ALL, null, null);
-// assertEquals(numberOfTaxa, 2);
}
@Test
public void testListChildren(){
boolean includeUnpublished;
Taxon t_acherontia = (Taxon) taxonDao.load(ACHERONTIA_UUID);
-
+ TaxonNode subtree = null;
includeUnpublished = true;
+
Classification classification = classificationDao.load(ClassificationUuid);
List<TaxonNode> children = classificationDao.listChildrenOf(
- t_acherontia, classification, includeUnpublished, null, null, null);
+ t_acherontia, classification, subtree, includeUnpublished, null, null, null);
assertNotNull(children);
assertEquals(2, children.size());
includeUnpublished = false;
children = classificationDao.listChildrenOf(
- t_acherontia, classification, includeUnpublished, null, null, null);
+ t_acherontia, classification, subtree, includeUnpublished, null, null, null);
assertNotNull(children);
assertEquals(1, children.size()); //1 is unpublished
import java.util.Set;
import java.util.UUID;
-import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.hibernate.FlushMode;
import org.hibernate.Hibernate;
assureSessionClear();
- Logger.getLogger(AdvancedBeanInitializer.class).setLevel(Level.TRACE);
+ //Logger.getLogger(AdvancedBeanInitializer.class).setLevel(Level.TRACE);
Taxon taxon = (Taxon)taxonDao.load(taxonUuid, Arrays.asList("$"));
assertTrue(Hibernate.isInitialized(taxon.getName()));
assureSessionClear();
- Logger.getLogger(AdvancedBeanInitializer.class).setLevel(Level.TRACE);
+ //Logger.getLogger(AdvancedBeanInitializer.class).setLevel(Level.TRACE);
deacivatedAutoIntitializers = clearAutoinitializers();
// load bean with autoinitializers deactivated
<REFERENCE ID="1" CREATED="2008-12-10 09:56:07.0" UUID="596b1325-be50-4b0a-9aa2-3ecd610215f2" UPDATED="2008-12-10 09:56:07.253" TITLECACHE="Lorem ipsum" PROTECTEDTITLECACHE="true" PROTECTEDABBREVTITLECACHE="false" ABBREVTITLECACHE="Sp. Pl." ABBREVTITLE="Sp. Pl." NOMENCLATURALLYRELEVANT="false" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1"/>\r
<REFERENCE ID="2" CREATED="2008-12-10 09:56:07.0" UUID="ad4322b7-4b05-48af-be70-f113e46c545e" UPDATED="2008-12-10 09:56:07.253" TITLECACHE="cate-sphingidae.org" PROTECTEDTITLECACHE="true" PROTECTEDABBREVTITLECACHE="false" ABBREVTITLECACHE="Sp. Pl." ABBREVTITLE="Sp. Pl." NOMENCLATURALLYRELEVANT="false" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1"/>\r
<REFERENCE ID="3" CREATED="2008-12-10 09:56:07.0" UUID="3eea6f96-0682-4025-8cdd-aaaf7c915ae2" UPDATED="2008-12-10 09:56:07.253" TITLECACHE="cate-araceae.org" PROTECTEDTITLECACHE="true" PROTECTEDABBREVTITLECACHE="false" ABBREVTITLECACHE="Sp. Pl." ABBREVTITLE="Sp. Pl." NOMENCLATURALLYRELEVANT="false" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1"/>\r
+ <CLASSIFICATION ID="5000" CREATED="2015-06-18 09:36:39.0" UUID="2a5ceebb-4830-4524-b330-78461bf8cb6b" PROTECTEDTITLECACHE="true" TITLECACHE="Classification 1" ROOTNODE_ID="5000"/>\r
+ <CLASSIFICATION ID="5001" CREATED="2015-06-18 09:36:39.0" UUID="a71467a6-74dc-4148-9530-484628a5ab0e" PROTECTEDTITLECACHE="true" TITLECACHE="Classification 2" ROOTNODE_ID="5001"/>\r
+ <TAXONNODE ID="5000" CREATED="2015-06-18 09:36:39.0" UUID="75202d4e-b2aa-4343-8b78-340a52d15c40" SORTINDEX="-1" TREEINDEX="#t5000#5000#" COUNTCHILDREN="3" TAXON_ID="[null]" EXCLUDED="FALSE" UNPLACED="FALSE" CLASSIFICATION_ID="5000" PARENT_ID="[null]"/>\r
+ <TAXONNODE ID="5002" CREATED="2015-06-18 09:36:39.0" UUID="6b76c838-bd8f-43f9-8fa9-077cd222a9b2" SORTINDEX="0" TREEINDEX="#t5000#5000#5002#" COUNTCHILDREN="0" TAXON_ID="1" EXCLUDED="FALSE" UNPLACED="FALSE" CLASSIFICATION_ID="5000" PARENT_ID="5000"/>\r
+ <TAXONNODE ID="5004" CREATED="2015-06-18 09:36:39.0" UUID="ebbae10f-d179-4a08-9939-9fed0a7f1433" SORTINDEX="2" TREEINDEX="#t5000#5000#5004#" COUNTCHILDREN="0" TAXON_ID="5002" EXCLUDED="FALSE" UNPLACED="FALSE" CLASSIFICATION_ID="5000" PARENT_ID="5000"/>\r
+ <TAXONNODE ID="5001" CREATED="2015-06-18 09:36:39.0" UUID="7b95a2a6-2c6e-4b8e-a91a-7a1d995490f9" SORTINDEX="-1" TREEINDEX="#t5001#5001#" COUNTCHILDREN="1" TAXON_ID="[null]" EXCLUDED="FALSE" UNPLACED="FALSE" CLASSIFICATION_ID="5001" PARENT_ID="[null]"/>\r
+ <TAXONNODE ID="5007" CREATED="2015-06-18 09:36:40.0" UUID="bcdf945f-1f02-423e-883d-fe89e0af93e4" SORTINDEX="0" TREEINDEX="#t5001#5001#5007#" COUNTCHILDREN="1" TAXON_ID="3" EXCLUDED="FALSE" UNPLACED="FALSE" CLASSIFICATION_ID="5001" PARENT_ID="5001"/>\r
+ <TAXONNODE ID="5006" CREATED="2015-06-18 09:36:40.0" UUID="56b10cf0-9522-407e-9f90-0c2dba263c94" SORTINDEX="0" TREEINDEX="#t5001#5001#5007#5006#" COUNTCHILDREN="2" TAXON_ID="15" EXCLUDED="FALSE" UNPLACED="FALSE" CLASSIFICATION_ID="5001" PARENT_ID="5007"/>\r
+ <TAXONNODE ID="5005" CREATED="2015-06-18 09:36:40.0" UUID="ba290371-a72b-43bf-a913-8a03c79755c7" SORTINDEX="0" TREEINDEX="#t5001#5001#5007#5006#5005#" COUNTCHILDREN="0" TAXON_ID="35" EXCLUDED="FALSE" UNPLACED="FALSE" CLASSIFICATION_ID="5001" PARENT_ID="5006"/>\r
+ <TAXONNODE ID="5008" CREATED="2015-06-18 09:36:40.0" UUID="80150ae0-e1e6-42a4-b224-21e099756c3d" SORTINDEX="1" TREEINDEX="#t5001#5001#5007#5006#5008#" COUNTCHILDREN="0" TAXON_ID="36" EXCLUDED="FALSE" UNPLACED="FALSE" CLASSIFICATION_ID="5001" PARENT_ID="5006"/>\r
+ \r
+ \r
<TAXONBASE DTYPE="Taxon" ID="1" SEC_ID="1" CREATED="2008-01-10 09:56:07.0" UUID="496b1325-be50-4b0a-9aa2-3ecd610215f2" UPDATED="2008-12-10 09:56:07.253" TITLECACHE="Aus sec. ???" PROTECTEDTITLECACHE="true" TAXONSTATUSUNKNOWN="false" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" NAME_ID="1" />\r
<TAXONBASE DTYPE="Taxon" ID="3" SEC_ID="2" CREATED="2008-01-12 09:56:07.0" UUID="54e767ee-894e-4540-a758-f906ecb4e2d9" UPDATED="2008-12-10 09:56:07.253" TITLECACHE="Sphingidae Linnaeus, 1758 sec. cate-sphingidae.org" PROTECTEDTITLECACHE="true" TAXONSTATUSUNKNOWN="false" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" NAME_ID="3"/>\r
<TAXONBASE DTYPE="Taxon" ID="5" SEC_ID="2" CREATED="2008-02-04 09:56:07.0" UUID="17233b5e-74e7-42fc-bc37-522684657ed4" UPDATED="2008-12-10 09:56:07.253" TITLECACHE="Smerinthus Latreille, 1802 sec. cate-sphingidae.org" PROTECTEDTITLECACHE="true" PUBLISH="true" TAXONSTATUSUNKNOWN="false" DOUBTFUL="false" USENAMECACHE="false" NAME_ID="5"/>\r
<parent>
<artifactId>cdmlib-parent</artifactId>
<groupId>eu.etaxonomy</groupId>
- <version>5.2.0</version>
+ <version>5.3.0</version>
<relativePath>../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
Object resultObject = null;
try {
if (EntityType.CLASSIFICATION.equals(entityType)) {
- resultObject = classificationController.getChildNodes(uuid,
+ resultObject = classificationController.getChildNodes(uuid, null,
null, null);
} else if (EntityType.TAXON_NODE.equals(entityType)) {
resultObject = taxonNodePrintAppController
<parent>
<artifactId>cdmlib-parent</artifactId>
<groupId>eu.etaxonomy</groupId>
- <version>5.2.0</version>
+ <version>5.3.0</version>
<relativePath>../pom.xml</relativePath>
</parent>
# avoid vast amount of "getCitation not yet implemented" & "getProblems not yet implemented" messages
log4j.logger.eu.etaxonomy.cdm.model.name.TaxonName = ERROR
log4j.logger.eu.etaxonomy.cdm.database.UpdatableRoutingDataSource = INFO
-log4j.logger.eu.etaxonomy.cdm.persistence.dao.initializer.AbstractBeanInitializer = ERROR
-log4j.logger.eu.etaxonomy.cdm.persistence.dao.initializer.AdvancedBeanInitializer=ERROR
+log4j.logger.eu.etaxonomy.cdm.persistence.dao.initializer = ERROR
log4j.logger.eu.etaxonomy.cdm.remote.controller = INFO
log4j.logger.eu.etaxonomy.cdm.remote.controller.interceptor = WARN
#log4j.logger.eu.etaxonomy.cdm.remote.json.processor.AbstractCdmBeanProcessor = DEBUG
http://www.springframework.org/schema/context\r
http://www.springframework.org/schema/context/spring-context-2.5.xsd">\r
\r
- <!-- enable processing of annotations such as @Autowired and @Configuration -->\r
- <context:annotation-config/>\r
-\r
<bean class="eu.etaxonomy.cdm.opt.config.DataSourceConfigurer" >\r
</bean>\r
\r
# avoid vast amount of "getCitation not yet implemented" & "getProblems not yet implemented" messages\r
log4j.logger.eu.etaxonomy.cdm.model.name.TaxonName = ERROR\r
log4j.logger.eu.etaxonomy.cdm.database.UpdatableRoutingDataSource = INFO\r
-log4j.logger.eu.etaxonomy.cdm.persistence.dao.AbstractBeanInitializer = ERROR\r
+log4j.logger.eu.etaxonomy.cdm.persistence.dao.initializer = ERROR\r
log4j.logger.eu.etaxonomy.cdm.remote.controller = INFO\r
log4j.logger.eu.etaxonomy.cdm.remote.controller.interceptor = WARN\r
#log4j.logger.eu.etaxonomy.cdm.remote.json.processor.AbstractCdmBeanProcessor = DEBUG\r
<parent>
<groupId>eu.etaxonomy</groupId>
<artifactId>cdmlib-parent</artifactId>
- <version>5.2.0</version>
+ <version>5.3.0</version>
<relativePath>../pom.xml</relativePath>
</parent>
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
+import eu.etaxonomy.cdm.api.service.IClassificationService;
import eu.etaxonomy.cdm.api.service.IService;
+import eu.etaxonomy.cdm.api.service.ITaxonNodeService;
import eu.etaxonomy.cdm.api.service.pager.Pager;
import eu.etaxonomy.cdm.api.service.pager.impl.DefaultPagerImpl;
import eu.etaxonomy.cdm.hibernate.HibernateProxyHelper;
import eu.etaxonomy.cdm.model.common.CdmBase;
import eu.etaxonomy.cdm.model.common.IPublishable;
import eu.etaxonomy.cdm.model.reference.INomenclaturalReference;
+import eu.etaxonomy.cdm.model.taxon.Classification;
+import eu.etaxonomy.cdm.model.taxon.TaxonNode;
import eu.etaxonomy.cdm.remote.controller.util.PagerParameters;
import eu.etaxonomy.cdm.remote.editor.UUIDPropertyEditor;
HttpServletResponse response, List<String> pathProperties)
throws IOException {
+ @SuppressWarnings("unused")
boolean includeUnpublished = NO_UNPUBLISHED;
CDM_BASE cdmBaseObject;
// if (service instanceof IPublishableService){
return (S)result;
}
+
+ /**
+ * @param subtreeUuid
+ * @param response
+ * @return
+ * @throws IOException
+ */
+ protected TaxonNode getSubtreeOrError(UUID subtreeUuid, ITaxonNodeService taxonNodeService, HttpServletResponse response) throws IOException {
+ TaxonNode subtree = null;
+ if (subtreeUuid != null){
+ subtree = taxonNodeService.find(subtreeUuid);
+ if(subtree == null) {
+ response.sendError(404 , "TaxonNode not found using " + subtreeUuid );
+ //will not happen
+ return null;
+ }
+ }
+ return subtree;
+ }
+
+ protected Classification getClassificationOrError(UUID classificationUuid,
+ IClassificationService classificationService, HttpServletResponse response) throws IOException {
+ Classification classification = null;
+ if (classificationUuid != null){
+ classification = classificationService.find(classificationUuid);
+ if(classification == null) {
+ response.sendError(404 , "Classification not found: " + classificationUuid );
+ //will not happen
+ return null;
+ }
+ }
+ return classification;
+ }
+
/* TODO implement
private Validator validator;
import org.springframework.web.bind.annotation.RequestMethod;\r
import org.springframework.web.bind.annotation.RequestParam;\r
\r
+import eu.etaxonomy.cdm.api.service.IClassificationService;\r
import eu.etaxonomy.cdm.api.service.IService;\r
+import eu.etaxonomy.cdm.api.service.ITaxonNodeService;\r
import eu.etaxonomy.cdm.api.service.pager.Pager;\r
import eu.etaxonomy.cdm.model.common.CdmBase;\r
+import eu.etaxonomy.cdm.model.taxon.Classification;\r
+import eu.etaxonomy.cdm.model.taxon.TaxonNode;\r
import eu.etaxonomy.cdm.remote.controller.util.PagerParameters;\r
import eu.etaxonomy.cdm.remote.editor.CdmTypePropertyEditor;\r
import eu.etaxonomy.cdm.remote.editor.UUIDPropertyEditor;\r
return service.list(type, limit, start, null, getInitializationStrategy());\r
}\r
\r
+ // this is a copy from BaseController, should be unified\r
+ protected TaxonNode getSubtreeOrError(UUID subtreeUuid, ITaxonNodeService taxonNodeService, HttpServletResponse response) throws IOException {\r
+ TaxonNode subtree = null;\r
+ if (subtreeUuid != null){\r
+ subtree = taxonNodeService.find(subtreeUuid);\r
+ if(subtree == null) {\r
+ response.sendError(404 , "Taxon node for subtree not found: " + subtreeUuid );\r
+ //will not happen\r
+ return null;\r
+ }\r
+ }\r
+ return subtree;\r
+ }\r
+\r
+ // this is a copy from BaseController, should be unified\r
+ protected Classification getClassificationOrError(UUID classificationUuid,\r
+ IClassificationService classificationService, HttpServletResponse response) throws IOException {\r
+ Classification classification = null;\r
+ if (classificationUuid != null){\r
+ classification = classificationService.find(classificationUuid);\r
+ if(classification == null) {\r
+ response.sendError(404 , "Classification not found: " + classificationUuid );\r
+ //will not happen\r
+ return null;\r
+ }\r
+ }\r
+ return classification;\r
+ }\r
+\r
/* TODO\r
@RequestMapping(method = RequestMethod.POST)\r
public T doPost(@ModelAttribute("object") T object, BindingResult result) {\r
public void setTaxonNodeService(ITaxonNodeService taxonNodeService) {
this.taxonNodeService = taxonNodeService;
}
+ protected ITaxonNodeService getTaxonNodeService() {
+ return this.taxonNodeService;
+ }
@InitBinder
method = RequestMethod.GET)
public List<TaxonNode> getChildNodes(
@PathVariable("uuid") UUID classificationUuid,
+ @RequestParam(value = "subtree", required = false) UUID subtreeUuid,
HttpServletRequest request,
HttpServletResponse response
) throws IOException {
- return getChildNodesAtRank(classificationUuid, null, request, response);
+ return getChildNodesAtRank(classificationUuid, null, subtreeUuid, request, response);
}
@RequestMapping(
public List<TaxonNode> getChildNodesAtRank(
@PathVariable("uuid") UUID classificationUuid,
@PathVariable("rankUuid") UUID rankUuid,
+ @RequestParam(value = "subtree", required = false) UUID subtreeUuid,
HttpServletRequest request,
HttpServletResponse response
) throws IOException {
response.sendError(404 , "Classification not found using " + classificationUuid );
return null;
}
+
+ TaxonNode subtree = getSubtreeOrError(subtreeUuid, taxonNodeService, response);
+
Rank rank = findRank(rankUuid);
boolean includeUnpublished = NO_UNPUBLISHED;
// long start = System.currentTimeMillis();
- List<TaxonNode> rootNodes = service.listRankSpecificRootNodes(classification, rank, includeUnpublished, null, null, NODE_INIT_STRATEGY());
+ List<TaxonNode> rootNodes = service.listRankSpecificRootNodes(classification, subtree, rank,
+ includeUnpublished, null, null, NODE_INIT_STRATEGY());
// System.err.println("service.listRankSpecificRootNodes() " + (System.currentTimeMillis() - start));
return rootNodes;
}
+
/**
*
* @param uuid
import org.springframework.web.bind.annotation.PathVariable;\r
import org.springframework.web.bind.annotation.RequestMapping;\r
import org.springframework.web.bind.annotation.RequestMethod;\r
+import org.springframework.web.bind.annotation.RequestParam;\r
\r
import eu.etaxonomy.cdm.api.service.IClassificationService;\r
+import eu.etaxonomy.cdm.api.service.ITaxonNodeService;\r
import eu.etaxonomy.cdm.api.service.ITaxonService;\r
import eu.etaxonomy.cdm.api.service.ITermService;\r
+import eu.etaxonomy.cdm.exception.FilterException;\r
import eu.etaxonomy.cdm.exception.UnpublishedException;\r
import eu.etaxonomy.cdm.model.common.DefinedTermBase;\r
import eu.etaxonomy.cdm.model.name.Rank;\r
public static final Logger logger = Logger.getLogger(ClassificationPortalListController.class);\r
\r
private ITaxonService taxonService;\r
+ private ITaxonNodeService taxonNodeService;\r
\r
private ITermService termService;\r
\r
this.taxonService = taxonService;\r
}\r
\r
+ @Autowired\r
+ public void setTaxonNodeService(ITaxonNodeService taxonNodeService) {\r
+ this.taxonNodeService = taxonNodeService;\r
+ }\r
+\r
\r
@InitBinder\r
@Override\r
method = RequestMethod.GET)\r
public List<TaxonNode> getChildNodes(\r
@PathVariable("treeUuid") UUID treeUuid,\r
+ @RequestParam(value = "subtree", required = false) UUID subtreeUuid,\r
HttpServletRequest request,\r
HttpServletResponse response\r
) throws IOException {\r
\r
- return getChildNodesAtRank(treeUuid, null, request, response);\r
+ return getChildNodesAtRank(treeUuid, null, subtreeUuid, request, response);\r
}\r
\r
\r
public List<TaxonNode> getChildNodesAtRank(\r
@PathVariable("treeUuid") UUID treeUuid,\r
@PathVariable("rankUuid") UUID rankUuid,\r
+ @RequestParam(value = "subtree", required = false) UUID subtreeUuid,\r
HttpServletRequest request,\r
HttpServletResponse response\r
) throws IOException {\r
\r
logger.info("getChildNodesAtRank() " + request.getRequestURI());\r
- Classification tree = null;\r
+ Classification classification = null;\r
Rank rank = null;\r
if(treeUuid != null){\r
- tree = service.find(treeUuid);\r
- if(tree == null) {\r
+ classification = service.find(treeUuid);\r
+ if(classification == null) {\r
HttpStatusMessage.UUID_NOT_FOUND.send(response, "Classification not found using " + treeUuid);\r
return null;\r
}\r
}\r
+ TaxonNode subtree = getSubtreeOrError(subtreeUuid, taxonNodeService, response);\r
\r
rank = findRank(rankUuid);\r
boolean includeUnpublished = NO_UNPUBLISHED;\r
// long start = System.currentTimeMillis();\r
- List<TaxonNode> rootNodes = service.listRankSpecificRootNodes(tree, rank, includeUnpublished, null, null, NODE_INIT_STRATEGY);\r
+ List<TaxonNode> rootNodes = service.listRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, null, null, NODE_INIT_STRATEGY);\r
// System.err.println("service.listRankSpecificRootNodes() " + (System.currentTimeMillis() - start));\r
return rootNodes;\r
}\r
public List<TaxonNode> getChildNodesOfTaxon(\r
@PathVariable("treeUuid") UUID treeUuid,\r
@PathVariable("taxonUuid") UUID taxonUuid,\r
+ @RequestParam(value = "subtree", required = false) UUID subtreeUuid,\r
HttpServletRequest request,\r
HttpServletResponse response) throws IOException {\r
logger.info("getChildNodesOfTaxon() " + request.getRequestURI());\r
\r
boolean includeUnpublished = NO_UNPUBLISHED; //for now we do not allow any remote service to publish unpublished data\r
\r
- List<TaxonNode> children = service.listChildNodesOfTaxon(taxonUuid, treeUuid,\r
- includeUnpublished, null, null, NODE_INIT_STRATEGY);\r
+ List<TaxonNode> children;\r
+ try {\r
+ children = service.listChildNodesOfTaxon(taxonUuid, treeUuid, subtreeUuid,\r
+ includeUnpublished, null, null, NODE_INIT_STRATEGY);\r
+ } catch (FilterException e) {\r
+ HttpStatusMessage.SUBTREE_FILTER_INVALID.send(response);\r
+ return null;\r
+ }\r
return children;\r
\r
}\r
@PathVariable("treeUuid") UUID treeUuid,\r
@PathVariable("taxonUuid") UUID taxonUuid,\r
@PathVariable("rankUuid") UUID rankUuid,\r
+ @RequestParam(value = "subtree", required = false) UUID subtreeUuid,\r
HttpServletRequest request,\r
HttpServletResponse response) throws IOException {\r
logger.info("getPathFromTaxonToRank() " + request.getRequestURI());\r
\r
boolean includeUnpublished = NO_UNPUBLISHED;\r
\r
- Classification tree = service.find(treeUuid);\r
+ Classification classification = service.find(treeUuid);\r
+ TaxonNode subtree = getSubtreeOrError(subtreeUuid, taxonNodeService, response);\r
Rank rank = findRank(rankUuid);\r
Taxon taxon = (Taxon) taxonService.load(taxonUuid);\r
\r
try {\r
- return service.loadTreeBranchToTaxon(taxon, tree, rank, includeUnpublished, NODE_INIT_STRATEGY);\r
+ List<TaxonNode> result = service.loadTreeBranchToTaxon(taxon, classification, subtree, rank, includeUnpublished, NODE_INIT_STRATEGY);\r
+ return result;\r
} catch (UnpublishedException e) {\r
HttpStatusMessage.ACCESS_DENIED.send(response);\r
return null;\r
value = {"{treeUuid}/pathFrom/{taxonUuid}"},\r
method = RequestMethod.GET)\r
public List<TaxonNode> getPathFromTaxon(\r
- @PathVariable("treeUuid") UUID treeUuid,\r
+ @PathVariable("treeUuid") UUID classificationUuid,\r
@PathVariable("taxonUuid") UUID taxonUuid,\r
+ @RequestParam(value = "subtree", required = false) UUID subtreeUuid,\r
HttpServletRequest request,\r
HttpServletResponse response) throws IOException {\r
\r
- return getPathFromTaxonToRank(treeUuid, taxonUuid, null, request, response);\r
+ return getPathFromTaxonToRank(classificationUuid, taxonUuid, null, subtreeUuid, request, response);\r
}\r
\r
\r
package eu.etaxonomy.cdm.remote.controller;
-import io.swagger.annotations.Api;
-
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import eu.etaxonomy.cdm.remote.editor.UUIDListPropertyEditor;
import eu.etaxonomy.cdm.remote.editor.UUIDPropertyEditor;
import eu.etaxonomy.cdm.remote.editor.UuidList;
+import io.swagger.annotations.Api;
/**
* TODO write controller documentation
* @throws IOException
*/
@RequestMapping(value = "find", method = RequestMethod.GET) // mapped as absolute path, see CdmAntPathMatcher
- public Pager<DescriptionElementBase> doFindDescriptionElements(
+ public Pager<? extends DescriptionElementBase> doFindDescriptionElements(
@RequestParam(value = "query", required = true) String queryString,
@RequestParam(value = "type", required = false) Class<? extends DescriptionElementBase> type,
@RequestParam(value = "pageSize", required = false) Integer pageSize,
PagerParameters pagerParams = new PagerParameters(pageSize, pageNumber);
pagerParams.normalizeAndValidate(response);
- Pager<DescriptionElementBase> pager = service.searchElements(type, queryString, pageSize, pageNumber, null, getInitializationStrategy());
+ Pager<? extends DescriptionElementBase> pager = service.searchElements(type, queryString, pageSize, pageNumber, null, getInitializationStrategy());
return pager;
}
public final static HttpStatusMessage INTERNAL_ERROR = new HttpStatusMessage(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "internal server error");\r
\r
public final static HttpStatusMessage ACCESS_DENIED = new HttpStatusMessage(HttpServletResponse.SC_FORBIDDEN, "access denied");\r
+ public final static HttpStatusMessage SUBTREE_FILTER_INVALID = new HttpStatusMessage(HttpServletResponse.SC_NOT_FOUND, "invalid uuid for subtree filter");\r
\r
\r
private int statusCode;\r
FieldUnitDTO fieldUnitDto = null;\r
if(sob instanceof DerivedUnit){\r
\r
- fieldUnitDto = service.findFieldUnitDTO(PreservedSpecimenDTO.newInstance(sob) , new ArrayList<FieldUnitDTO>(), new HashMap<UUID, DerivateDTO>());\r
+ fieldUnitDto = service.findFieldUnitDTO(new PreservedSpecimenDTO(sob) , new ArrayList<FieldUnitDTO>(), new HashMap<UUID, DerivateDTO>());\r
\r
}\r
\r
HttpServletRequest request,
HttpServletResponse response) throws IOException {
+ logger.info("doGet() " + requestPathAndQuery(request));
+
Registration reg = super.doGet(uuid, request, response);
if(reg != null){
if(userHelper.userIsAutheticated() && userHelper.userIsAnnonymous() && !reg.getStatus().equals(RegistrationStatus.PUBLISHED)) {
import eu.etaxonomy.cdm.api.service.dto.TaxonRelationshipsDTO;\r
import eu.etaxonomy.cdm.api.service.pager.Pager;\r
import eu.etaxonomy.cdm.exception.UnpublishedException;\r
+import eu.etaxonomy.cdm.model.common.CdmBase;\r
import eu.etaxonomy.cdm.model.common.DefinedTermBase;\r
import eu.etaxonomy.cdm.model.common.MarkerType;\r
import eu.etaxonomy.cdm.model.common.RelationshipBase.Direction;\r
import eu.etaxonomy.cdm.model.description.TaxonDescription;\r
import eu.etaxonomy.cdm.model.occurrence.SpecimenOrObservationBase;\r
import eu.etaxonomy.cdm.model.taxon.Classification;\r
+import eu.etaxonomy.cdm.model.taxon.Synonym;\r
import eu.etaxonomy.cdm.model.taxon.Taxon;\r
import eu.etaxonomy.cdm.model.taxon.TaxonBase;\r
import eu.etaxonomy.cdm.model.taxon.TaxonNode;\r
private ITermService termService;\r
\r
protected static final List<String> TAXONNODE_INIT_STRATEGY = Arrays.asList(new String []{\r
- "taxonNodes"\r
+ "taxonNodes.classification"\r
});\r
\r
public TaxonController(){\r
return getInitializationStrategy();\r
}\r
\r
+ @RequestMapping(params="subtree", method = RequestMethod.GET)\r
+ public TaxonBase<?> doGet(@PathVariable("uuid") UUID uuid,\r
+ @RequestParam(value = "subtree", required = true) UUID subtreeUuid, //if subtree does not exist the base class method is used, therefore required\r
+ HttpServletRequest request,\r
+ HttpServletResponse response) throws IOException {\r
+ if(request != null) {\r
+ logger.info("doGet() " + requestPathAndQuery(request));\r
+ }\r
+ //TODO do we want to allow Synonyms at all? Maybe needs initialization\r
+ TaxonBase<?> taxonBase = getCdmBaseInstance(uuid, response, TAXONNODE_INIT_STRATEGY);\r
+ //TODO we should move subtree check down to service or persistence\r
+ TaxonNode subtree = getSubtreeOrError(subtreeUuid, nodeService, response);\r
+ taxonBase = checkExistsSubtreeAndAccess(taxonBase, subtree, NO_UNPUBLISHED, response);\r
+ return taxonBase;\r
+ }\r
+\r
+ /**\r
+ * Checks if a {@link TaxonBase taxonBase} is public and belongs to a {@link TaxonNode subtree}\r
+ * as accepted taxon or synonym.\r
+ * If not, the corresponding {@link HttpStatusMessage http status messages} are added to the response.\r
+ * <BR>\r
+ * The relation to a subtree via a concept relationship is not (yet) checked.\r
+ * @param taxonBase the taxon (or synonym) to check\r
+ * @param subtree the subtree the taxon must belong to\r
+ * @param includeUnpublished\r
+ * @param response\r
+ * @return\r
+ * @throws IOException\r
+ */\r
+ protected <S extends TaxonBase<?>> S checkExistsSubtreeAndAccess(S taxonBase, TaxonNode subtree, boolean includeUnpublished,\r
+ HttpServletResponse response) throws IOException {\r
+ taxonBase = checkExistsAndAccess(taxonBase, NO_UNPUBLISHED, response);\r
+ if (taxonBase != null){\r
+ //TODO synonyms maybe can not be initialized\r
+ Taxon taxon = taxonBase.isInstanceOf(Synonym.class)?\r
+ CdmBase.deproxy(taxonBase, Synonym.class).getAcceptedTaxon():\r
+ CdmBase.deproxy(taxonBase, Taxon.class);\r
+ //check if taxon has any node that is a descendant of subtree\r
+ for (TaxonNode taxonNode :taxon.getTaxonNodes()){\r
+ if (subtree.isAncestor(taxonNode)){\r
+ return taxonBase;\r
+ }\r
+ }\r
+ HttpStatusMessage.ACCESS_DENIED.send(response);\r
+ }\r
+ return null;\r
+ }\r
+\r
\r
/**\r
* Get the accepted {@link Taxon} for a given\r
@RequestMapping(value = "taxonNodes", method = RequestMethod.GET)\r
public Set<TaxonNode> doGetTaxonNodes(\r
@PathVariable("uuid") UUID uuid,\r
+ @RequestParam(value = "subtree", required = false) UUID subtreeUuid,\r
HttpServletRequest request,\r
HttpServletResponse response) throws IOException {\r
\r
- TaxonBase<?> tb = service.load(uuid, NO_UNPUBLISHED, TAXONNODE_INIT_STRATEGY);\r
- if(tb instanceof Taxon){\r
- return ((Taxon)tb).getTaxonNodes();\r
+ logger.info("doGetTaxonNodes" + requestPathAndQuery(request));\r
+ TaxonBase<?> taxonBase;\r
+ if (subtreeUuid != null){\r
+ taxonBase = doGet(uuid, subtreeUuid, request, response);\r
+ }else{\r
+ taxonBase = service.load(uuid, NO_UNPUBLISHED, TAXONNODE_INIT_STRATEGY);\r
+ }\r
+ if(taxonBase instanceof Taxon){\r
+ return ((Taxon)taxonBase).getTaxonNodes();\r
} else {\r
HttpStatusMessage.UUID_REFERENCES_WRONG_TYPE.send(response);\r
return null;\r
@RequestParam(value = "directTypes", required = false) UuidList directTypeUuids,\r
@RequestParam(value = "inversTypes", required = false) UuidList inversTypeUuids,\r
@RequestParam(value = "direction", required = false) Direction direction,\r
- @RequestParam(value="groupMisapplications", required=false, defaultValue="true") final boolean groupMisapplications,\r
+ @RequestParam(value="groupMisapplications", required=false, defaultValue="false") final boolean groupMisapplications,\r
HttpServletRequest request,\r
HttpServletResponse response) throws IOException {\r
\r
@RequestMapping(value = {"/taxon"})
public class TaxonListController extends AbstractIdentifiableListController<TaxonBase, ITaxonService> {
-
private static final List<String> SIMPLE_TAXON_INIT_STRATEGY = DEFAULT_INIT_STRATEGY;
protected List<String> getSimpleTaxonInitStrategy() {
return SIMPLE_TAXON_INIT_STRATEGY;
}
- /**
- *
- */
public TaxonListController(){
super();
setInitializationStrategy(Arrays.asList(new String[]{"$","name.nomenclaturalReference"}));
@Autowired
private ITaxonNodeService taxonNodeService;
-
@Autowired
private ITermService termService;
binder.registerCustomEditor(MatchMode.class, new MatchModePropertyEditor());
binder.registerCustomEditor(Rank.class, new RankPropertyEditor());
binder.registerCustomEditor(PresenceAbsenceTerm.class, new TermBasePropertyEditor<PresenceAbsenceTerm>(termService));
-
}
/**
@RequestMapping(method = RequestMethod.GET, value={"search"})
public Pager<SearchResult<TaxonBase>> doSearch(
@RequestParam(value = "query", required = true) String query,
- @RequestParam(value = "classificationUuid", required = false) UUID classificationUuid,
+ @RequestParam(value = "tree", required = false) UUID classificationUuid,
+ @RequestParam(value = "subtree", required = false) UUID subtreeUuid,
@RequestParam(value = "area", required = false) DefinedTermBaseList<NamedArea> areaList,
@RequestParam(value = "status", required = false) PresenceAbsenceTerm[] status,
@RequestParam(value = "pageNumber", required = false) Integer pageNumber,
searchModes.add(TaxaAndNamesSearchMode.includeUnpublished);
}
- Classification classification = classificationService.load(classificationUuid);
+ Classification classification = getClassificationOrError(classificationUuid, classificationService, response);
+ TaxonNode subtree = getSubtreeOrError(subtreeUuid, taxonNodeService, response);
Set<PresenceAbsenceTerm> statusSet = null;
if(status != null) {
}
return service.findTaxaAndNamesByFullText(searchModes, query,
- classification, areaSet, statusSet, null,
+ classification, subtree, areaSet, statusSet, null,
false, pagerParams.getPageSize(), pagerParams.getPageIndex(),
OrderHint.NOMENCLATURAL_SORT_ORDER.asList(), getSimpleTaxonInitStrategy());
}
@RequestMapping(method = RequestMethod.GET, value={"find"})
public Pager<IdentifiableEntity> doFind(
@RequestParam(value = "query", required = true) String query,
- @RequestParam(value = "tree", required = false) UUID treeUuid,
+ @RequestParam(value = "tree", required = false) UUID classificationUuid,
+ @RequestParam(value = "subtree", required = false) UUID subtreeUuid,
@RequestParam(value = "area", required = false) Set<NamedArea> areas,
@RequestParam(value = "pageNumber", required = false) Integer pageNumber,
@RequestParam(value = "pageSize", required = false) Integer pageSize,
config.setNamedAreas(areas);
config.setDoIncludeAuthors(includeAuthors != null ? includeAuthors : Boolean.FALSE);
config.setOrder(order);
- if(treeUuid != null){
- Classification classification = classificationService.find(treeUuid);
+ if(classificationUuid != null){
+ Classification classification = classificationService.find(classificationUuid);
config.setClassification(classification);
}
+ TaxonNode subtree = getSubtreeOrError(subtreeUuid, taxonNodeService, response);
+ config.setSubtree(subtree);
+
return service.findTaxaAndNames(config);
}
@RequestParam(value = "clazz", required = false) Class<? extends DescriptionElementBase> clazz,
@RequestParam(value = "query", required = true) String queryString,
@RequestParam(value = "tree", required = false) UUID treeUuid,
+ @RequestParam(value = "subtree", required = false) UUID subtreeUuid,
@RequestParam(value = "features", required = false) UuidList featureUuids,
@RequestParam(value = "languages", required = false) List<Language> languages,
@RequestParam(value = "hl", required = false) Boolean highlighting,
}
Classification classification = null;
- if(treeUuid != null){
+ if(treeUuid != null){
classification = classificationService.find(treeUuid);
- }
+ }
+ TaxonNode subtree = getSubtreeOrError(subtreeUuid, taxonNodeService, response);
- List<Feature> features = null;
- if(featureUuids != null){
+ List<Feature> features = null;
+ if(featureUuids != null){
features = new ArrayList<>(featureUuids.size());
for(UUID uuid : featureUuids){
features.add((Feature) termService.find(uuid));
}
- }
+ }
- Pager<SearchResult<TaxonBase>> pager = service.findByDescriptionElementFullText(
- clazz, queryString, classification, features, languages, highlighting,
+ Pager<SearchResult<TaxonBase>> pager = service.findByDescriptionElementFullText(
+ clazz, queryString, classification, subtree, features, languages, highlighting,
pagerParams.getPageSize(), pagerParams.getPageIndex(),
((List<OrderHint>)null), getSimpleTaxonInitStrategy());
- return pager;
+ return pager;
}
@RequestMapping(method = RequestMethod.GET, value={"findByFullText"})
public Pager<SearchResult<TaxonBase>> doFindByFullText(
@RequestParam(value = "clazz", required = false) Class<? extends TaxonBase> clazz,
@RequestParam(value = "query", required = true) String queryString,
- @RequestParam(value = "tree", required = false) UUID treeUuid,
+ @RequestParam(value = "tree", required = false) UUID classificationUuid,
+ @RequestParam(value = "subtree", required = false) UUID subtreeUuid,
@RequestParam(value = "languages", required = false) List<Language> languages,
@RequestParam(value = "hl", required = false) Boolean highlighting,
@RequestParam(value = "pageNumber", required = false) Integer pageNumber,
}
Classification classification = null;
- if(treeUuid != null){
- classification = classificationService.find(treeUuid);
+ if(classificationUuid != null){
+ classification = classificationService.find(classificationUuid);
}
+ TaxonNode subtree = getSubtreeOrError(subtreeUuid, taxonNodeService, response);
- Pager<SearchResult<TaxonBase>> pager = service.findByFullText(clazz, queryString, classification, includeUnpublished,
+ Pager<SearchResult<TaxonBase>> pager = service.findByFullText(clazz, queryString, classification, subtree,
+ includeUnpublished,
languages, highlighting, pagerParams.getPageSize(), pagerParams.getPageIndex(), ((List<OrderHint>) null),
initializationStrategy);
return pager;
@RequestParam(value = "clazz", required = false) Class<? extends TaxonBase> clazz,
@RequestParam(value = "query", required = true) String queryString,
@RequestParam(value = "tree", required = false) UUID treeUuid,
+ @RequestParam(value = "subtree", required = false) UUID subtreeUuid,
@RequestParam(value = "languages", required = false) List<Language> languages,
@RequestParam(value = "hl", required = false) Boolean highlighting,
@RequestParam(value = "pageNumber", required = false) Integer pageNumber,
}
Classification classification = null;
- if(treeUuid != null){
+ if(treeUuid != null){
classification = classificationService.find(treeUuid);
- }
+ }
+ TaxonNode subtree = getSubtreeOrError(subtreeUuid, taxonNodeService, response);
- Pager<SearchResult<TaxonBase>> pager = service.findByEverythingFullText(
- queryString, classification, includeUnpublished, languages, highlighting,
+ Pager<SearchResult<TaxonBase>> pager = service.findByEverythingFullText(
+ queryString, classification, subtree, includeUnpublished, languages, highlighting,
pagerParams.getPageSize(), pagerParams.getPageIndex(),
((List<OrderHint>)null), initializationStrategy);
- return pager;
+ return pager;
}
/**
import java.io.IOException;
import java.util.UUID;
+import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
+import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
@RequestMapping(value = {"/taxonNode/{uuid}"})
public class TaxonNodeController extends AbstractController<TaxonNode, ITaxonNodeService> {
+ public static final Logger logger = Logger.getLogger(TaxonNodeController.class);
+
@Override
@Autowired
public void setService(ITaxonNodeService service) {
method = RequestMethod.GET)
public TaxonNodeDto doGetParent(
@PathVariable("uuid") UUID uuid,
+ HttpServletRequest request,
HttpServletResponse response
) throws IOException {
+ logger.info("doGetParent() " + requestPathAndQuery(request));
return service.parentDto(uuid);
}
+ @RequestMapping(
+ method = RequestMethod.GET)
+ public TaxonNodeDto doGet(
+ @PathVariable("uuid") UUID uuid,
+ HttpServletRequest request,
+ HttpServletResponse response
+ ) throws IOException {
+
+ logger.info("doGet() " + requestPathAndQuery(request));
+ return service.dto(uuid);
+ }
+
/**
*
* @param uuid
import eu.etaxonomy.cdm.api.service.ITaxonService;
import eu.etaxonomy.cdm.api.service.ITermService;
import eu.etaxonomy.cdm.api.service.util.TaxonRelationshipEdge;
-import eu.etaxonomy.cdm.database.UpdatableRoutingDataSource;
import eu.etaxonomy.cdm.model.common.RelationshipBase.Direction;
import eu.etaxonomy.cdm.model.location.NamedArea;
import eu.etaxonomy.cdm.model.media.Media;
"childNodes.taxon",
});
- protected static final List<String> TAXONNODE_INIT_STRATEGY = Arrays.asList(new String []{
- "taxonNodes.classification"
- });
-
-
-
private static final String featureTreeUuidPattern = "^/taxon(?:(?:/)([^/?#&\\.]+))+.*";
return list;
}
- @Override
- @RequestMapping(value = "taxonNodes", method = RequestMethod.GET)
- public Set<TaxonNode> doGetTaxonNodes(
- @PathVariable("uuid") UUID uuid,
- HttpServletRequest request,
- HttpServletResponse response) throws IOException {
-
- logger.info("doGetTaxonNodes" + requestPathAndQuery(request));
- TaxonBase<?> taxon = service.load(uuid, NO_UNPUBLISHED, TAXONNODE_INIT_STRATEGY);
- if(taxon instanceof Taxon){
- return ((Taxon)taxon).getTaxonNodes();
- } else {
- HttpStatusMessage.UUID_REFERENCES_WRONG_TYPE.send(response);
- return null;
- }
- }
// @RequestMapping(value = "specimens", method = RequestMethod.GET)
// public ModelAndView doGetSpecimens(
# avoid vast amount of "getCitation not yet implemented" & "getProblems not yet implemented" messages
log4j.logger.eu.etaxonomy.cdm.model.name.TaxonName = ERROR
log4j.logger.eu.etaxonomy.cdm.database.UpdatableRoutingDataSource = INFO
-log4j.logger.eu.etaxonomy.cdm.persistence.dao.initializer.AbstractBeanInitializer = ERROR
+log4j.logger.eu.etaxonomy.cdm.persistence.dao.initializer = ERROR
log4j.logger.eu.etaxonomy.cdm.remote.controller = INFO
#log4j.logger.eu.etaxonomy.cdm.remote.json.processor.AbstractCdmBeanProcessor = DEBUG
log4j.logger.eu.etaxonomy.cdm.remote.controller.interceptor = WARN
<!-- \r
This applicationContext is not a full cdm application context. It only covers the remote compoents \r
without service and persistence. The cacheManager is usually provided via a Java config class in\r
- eu/etaxonomy/cdm/service/api/conf which cannot use in this conetext. Therefore we are \r
	eu/etaxonomy/cdm/service/api/conf which cannot be used in this context. Therefore we are \r
using the EhCacheManagerFactoryBean here to initialize the cacheManager bean.\r
-->\r
<bean id="cacheManager" class="org.springframework.cache.ehcache.EhCacheManagerFactoryBean">\r
# avoid vast amount of "getCitation not yet implemented" & "getProblems not yet implemented" messages\r
log4j.logger.eu.etaxonomy.cdm.model.name.TaxonName = ERROR\r
log4j.logger.eu.etaxonomy.cdm.database.UpdatableRoutingDataSource = INFO\r
-log4j.logger.eu.etaxonomy.cdm.persistence.dao.AbstractBeanInitializer = ERROR\r
+log4j.logger.eu.etaxonomy.cdm.persistence.dao.initializer = ERROR\r
log4j.logger.eu.etaxonomy.cdm.remote.controller = INFO\r
log4j.logger.eu.etaxonomy.cdm.remote.controller.interceptor = WARN\r
#log4j.logger.eu.etaxonomy.cdm.remote.json.processor.AbstractCdmBeanProcessor = DEBUG\r
<parent>
<groupId>eu.etaxonomy</groupId>
<artifactId>cdmlib-parent</artifactId>
- <version>5.2.0</version>
+ <version>5.3.0</version>
<relativePath>../pom.xml</relativePath>
</parent>
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
+import org.springframework.context.annotation.Lazy;
import org.springframework.orm.hibernate5.HibernateTransactionManager;
import org.springframework.security.authentication.ProviderManager;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
//@Autowired
private DataSource dataSource;
@Autowired
+ @Lazy
private ProviderManager authenticationManager;
@Autowired
private IUserService userService;
import net.sf.ehcache.store.MemoryStoreEvictionPolicy;
/**
- * CDM Entity Cacher class based on EhCache.
+ * CDM Entity Cacher class based on EhCache using UUID as key.
* The cacher allows null values to be cached.
*
* @author cmathew
- *
- * @param <T>
*/
public abstract class CdmCacher implements ICdmUuidCacher {
@Autowired
public CacheManager cacheManager;
- public static final String DEFAULT_CACHE_NAME = "defaultCache"; //TODO compare with CacheConfiguration where the name for the default cache is 'default', Why another name here?
+ public static final String DEFAULT_CACHE_NAME = "cdmDefaultCache"; //TODO compare with CacheConfiguration where the name for the default cache is 'default', Why another name here?
/**
* Constructor which initialises a singleton {@link net.sf.ehcache.CacheManager}
return defaultCache;
}
+ @Override
+ public void dispose(){
+ cacheManager.getCache(DEFAULT_CACHE_NAME).dispose();
+ }
+
/**
* Gets the cache element corresponding to the given {@link java.util.UUID}
*
@Override
public boolean existsAndIsNotNull(UUID uuid) {
Element e = getCacheElement(uuid);
- CdmBase cdmEntity;
if (e != null) {
return e.getObjectValue() != null;
}
* CDM Entity Cacher class which handles the caching of Defined Terms.
*
* @author cmathew
- *
- * @param <T>
*/
@Component
public class CdmTermCacher extends CdmCacher {
/**
- * Returns the default cache configuration.
+ * Returns the default cache configuration for the cache
+ * named {@link CdmCacher#DEFAULT_CACHE_NAME "cdmDefaultCache"}
*
* @return
*/
CacheConfiguration cc = new CacheConfiguration(CdmCacher.DEFAULT_CACHE_NAME, 500)
.memoryStoreEvictionPolicy(MemoryStoreEvictionPolicy.LFU)
+ .maxEntriesLocalHeap(10) // avoid ehcache consuming too much heap
.eternal(false)
// default ttl and tti set to 2 hours
.timeToLiveSeconds(60*60*2)
import eu.etaxonomy.cdm.api.service.pager.impl.AbstractPagerImpl;
import eu.etaxonomy.cdm.api.service.pager.impl.DefaultPagerImpl;
import eu.etaxonomy.cdm.common.monitor.IProgressMonitor;
+import eu.etaxonomy.cdm.exception.FilterException;
import eu.etaxonomy.cdm.exception.UnpublishedException;
import eu.etaxonomy.cdm.hibernate.HHH_9751_Util;
import eu.etaxonomy.cdm.hibernate.HibernateProxyHelper;
}
}
+ @Override
+ public List<TaxonNode> listRankSpecificRootNodes(Classification classification, Rank rank,
+ boolean includeUnpublished, Integer pageSize, Integer pageIndex, List<String> propertyPaths) {
+ return listRankSpecificRootNodes(classification, null, rank, includeUnpublished, pageSize, pageIndex, propertyPaths);
+ }
+
/**
* {@inheritDoc}
*/
@Override
- public List<TaxonNode> listRankSpecificRootNodes(Classification classification, Rank rank,
+ public List<TaxonNode> listRankSpecificRootNodes(Classification classification,
+ TaxonNode subtree, Rank rank,
boolean includeUnpublished, Integer pageSize, Integer pageIndex, List<String> propertyPaths) {
- return pageRankSpecificRootNodes(classification, rank, includeUnpublished, pageSize, pageIndex, propertyPaths).getRecords();
+ return pageRankSpecificRootNodes(classification, subtree, rank, includeUnpublished, pageSize, pageIndex, propertyPaths).getRecords();
}
@Override
public Pager<TaxonNode> pageRankSpecificRootNodes(Classification classification, Rank rank,
boolean includeUnpublished, Integer pageSize, Integer pageIndex, List<String> propertyPaths) {
- long[] numberOfResults = dao.countRankSpecificRootNodes(classification, includeUnpublished, rank);
+ return pageRankSpecificRootNodes(classification, null, rank, includeUnpublished, pageSize, pageIndex, propertyPaths);
+ }
+
+ @Override
+ public Pager<TaxonNode> pageRankSpecificRootNodes(Classification classification, TaxonNode subtree, Rank rank,
+ boolean includeUnpublished, Integer pageSize, Integer pageIndex, List<String> propertyPaths) {
+ long[] numberOfResults = dao.countRankSpecificRootNodes(classification, subtree, includeUnpublished, rank);
long totalNumberOfResults = numberOfResults[0] + (numberOfResults.length > 1 ? numberOfResults[1] : 0);
List<TaxonNode> results = new ArrayList<>();
}
List<TaxonNode> perQueryResults = dao.listRankSpecificRootNodes(classification,
- rank, includeUnpublished, remainingLimit, start, propertyPaths, queryIndex);
+ subtree, rank, includeUnpublished, remainingLimit,
+ start, propertyPaths, queryIndex);
results.addAll(perQueryResults);
if(remainingLimit != null ){
remainingLimit = remainingLimit - results.size();
}
+ @Override
+ public List<TaxonNode> loadTreeBranch(TaxonNode taxonNode, Rank baseRank,
+ boolean includeUnpublished, List<String> propertyPaths) throws UnpublishedException{
+ return loadTreeBranch(taxonNode, null, baseRank, includeUnpublished, propertyPaths);
+ }
/**
* {@inheritDoc}
*/
@Override
- public List<TaxonNode> loadTreeBranch(TaxonNode taxonNode, Rank baseRank,
+ public List<TaxonNode> loadTreeBranch(TaxonNode taxonNode, TaxonNode subtree, Rank baseRank,
boolean includeUnpublished, List<String> propertyPaths) throws UnpublishedException{
TaxonNode thisNode = taxonNodeDao.load(taxonNode.getUuid(), propertyPaths);
if(baseRank != null && parentNodeRank != null && baseRank.isLower(parentNodeRank)){
break;
}
+ if((subtree!= null && !subtree.isAncestor(parentNode) )){
+ break;
+ }
pathToRoot.add(parentNode);
thisNode = parentNode;
@Override
public List<TaxonNode> loadTreeBranchToTaxon(Taxon taxon, Classification classification, Rank baseRank,
boolean includeUnpublished, List<String> propertyPaths) throws UnpublishedException{
+ return loadTreeBranchToTaxon(taxon, classification, null, baseRank, includeUnpublished, propertyPaths);
+ }
+
+ @Override
+ public List<TaxonNode> loadTreeBranchToTaxon(Taxon taxon, Classification classification,
+ TaxonNode subtree, Rank baseRank,
+ boolean includeUnpublished, List<String> propertyPaths) throws UnpublishedException{
UUID nodeUuid = getTaxonNodeUuidByTaxonUuid(classification.getUuid(), taxon.getUuid());
TaxonNode node = taxonNodeService.find(nodeUuid);
if(node == null){
logger.warn("The specified taxon is not found in the given tree.");
return null;
+ }else if (subtree != null && !node.isDescendant(subtree)){
+ //TODO handle as exception? E.g. FilterException, AccessDeniedException?
+ logger.warn("The specified taxon is not found for the given subtree.");
+ return null;
}
- return loadTreeBranch(node, baseRank, includeUnpublished, propertyPaths);
+
+ return loadTreeBranch(node, subtree, baseRank, includeUnpublished, propertyPaths);
}
@Override
public List<TaxonNode> listChildNodesOfTaxon(UUID taxonUuid, UUID classificationUuid,
boolean includeUnpublished, Integer pageSize, Integer pageIndex, List<String> propertyPaths){
+ try {
+ return listChildNodesOfTaxon(taxonUuid, classificationUuid, null, includeUnpublished, pageSize, pageIndex, propertyPaths);
+ } catch (FilterException e) {
+ throw new RuntimeException(e); //this should not happen as filter is null
+ }
+ }
+
+ @Override
+ public List<TaxonNode> listChildNodesOfTaxon(UUID taxonUuid, UUID classificationUuid, UUID subtreeUuid,
+ boolean includeUnpublished, Integer pageSize, Integer pageIndex, List<String> propertyPaths) throws FilterException{
Classification classification = dao.load(classificationUuid);
Taxon taxon = (Taxon) taxonDao.load(taxonUuid);
+ TaxonNode subtree = taxonNodeDao.load(subtreeUuid);
+ if (subtreeUuid != null && subtree == null){
+ throw new FilterException("Taxon node for subtree filter can not be found in database", true);
+ }
List<TaxonNode> results = dao.listChildrenOf(
- taxon, classification, includeUnpublished, pageSize, pageIndex, propertyPaths);
+ taxon, classification, subtree, includeUnpublished, pageSize, pageIndex, propertyPaths);
Collections.sort(results, taxonNodeComparator); // FIXME this is only a HACK, order during the hibernate query in the dao
return results;
}
*\r
* The contents of this file are subject to the Mozilla Public License Version 1.1\r
* See LICENSE.TXT at the top of this package for the full license terms.\r
-*/ \r
+*/\r
\r
package eu.etaxonomy.cdm.api.service;\r
\r
\r
@Service\r
@Transactional(readOnly = true)\r
-public class CollectionServiceImpl extends IdentifiableServiceBase<Collection, ICollectionDao> implements ICollectionService {\r
- \r
+public class CollectionServiceImpl\r
+ extends IdentifiableServiceBase<Collection, ICollectionDao>\r
+ implements ICollectionService {\r
+\r
@SuppressWarnings("unused")\r
static private final Logger logger = Logger.getLogger(CollectionServiceImpl.class);\r
\r
}\r
super.updateTitleCacheImpl(clazz, stepSize, cacheStrategy, monitor);\r
}\r
- \r
\r
- \r
- public List<Collection> searchByCode(String code) {\r
+\r
+ @Override\r
+ public List<Collection> searchByCode(String code) {\r
return this.dao.getCollectionByCode(code);\r
}\r
}\r
/**
* @author a.mueller
- * @since 24.06.2008
- * @version 1.0
- */
-/**
* @author a.kohlbecker
- * @since Dec 5, 2013
+ *
+ * @since 24.06.2008
*
*/
@Service
* move: descriptionElementService.search
*/
@Override
- public Pager<DescriptionElementBase> searchElements(Class<? extends DescriptionElementBase> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+// public Pager<T> searchElements(Class<? extends T> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+ public <S extends DescriptionElementBase> Pager<S> searchElements(Class<S> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
long numberOfResults = descriptionElementDao.count(clazz, queryString);
- List<DescriptionElementBase> results = new ArrayList<>();
+ List<S> results = new ArrayList<>();
if(numberOfResults > 0) { // no point checking again //TODO use AbstractPagerImpl.hasResultsInRange(numberOfResults, pageNumber, pageSize)
- results = descriptionElementDao.search(clazz, queryString, pageSize, pageNumber, orderHints, propertyPaths);
+ results = (List<S>)descriptionElementDao.search(clazz, queryString, pageSize, pageNumber, orderHints, propertyPaths);
}
return new DefaultPagerImpl<>(pageNumber, numberOfResults, pageSize, results);
@Override
public Collection<SpecimenNodeWrapper> loadSpecimens(DescriptiveDataSet descriptiveDataSet){
- //set filter parameters
+ List<UUID> filteredNodes = findFilteredTaxonNodes(descriptiveDataSet);
+ return occurrenceService.listUuidAndTitleCacheByAssociatedTaxon(filteredNodes, null, null);
+ }
+
+ @Override
+ public List<UUID> findFilteredTaxonNodes(DescriptiveDataSet descriptiveDataSet){
TaxonNodeFilter filter = TaxonNodeFilter.NewRankInstance(descriptiveDataSet.getMinRank(), descriptiveDataSet.getMaxRank());
descriptiveDataSet.getGeoFilter().forEach(area -> filter.orArea(area.getUuid()));
descriptiveDataSet.getTaxonSubtreeFilter().forEach(node -> filter.orSubtree(node));
filter.setIncludeUnpublished(true);
- List<UUID> filteredNodes = taxonNodeService.uuidList(filter);
- return occurrenceService.listUuidAndTitleCacheByAssociatedTaxon(filteredNodes, null, null);
+ return taxonNodeService.uuidList(filter);
+ }
+
+ @Override
+ public List<TaxonNode> loadFilteredTaxonNodes(DescriptiveDataSet descriptiveDataSet, List<String> propertyPaths){
+ return taxonNodeService.load(findFilteredTaxonNodes(descriptiveDataSet), propertyPaths);
}
- private TaxonNode findTaxonNodeForDescription(TaxonNode taxonNode, DescriptionBase description){
- List<DerivedUnit> units = occurrenceService.listByAssociatedTaxon(DerivedUnit.class, null, taxonNode.getTaxon(), null, null, null, null, Arrays.asList("descriptions"));
- for (DerivedUnit unit : units) {
- if(unit.getDescriptions().contains(description)){
+ private TaxonNode findTaxonNodeForDescription(TaxonNode taxonNode, SpecimenOrObservationBase specimen){
+ Collection<SpecimenNodeWrapper> nodeWrapper = occurrenceService.listUuidAndTitleCacheByAssociatedTaxon(Arrays.asList(taxonNode.getUuid()), null, null);
+ for (SpecimenNodeWrapper specimenNodeWrapper : nodeWrapper) {
+ if(specimenNodeWrapper.getUuidAndTitleCache().getId().equals(specimen.getId())){
return taxonNode;
}
}
for (TaxonNode node : taxonSubtreeFilter) {
//check for node
node = taxonNodeService.load(node.getId(), Arrays.asList("taxon"));
- taxonNode = findTaxonNodeForDescription(node, description);
+ taxonNode = findTaxonNodeForDescription(node, specimen);
if(taxonNode!=null){
break;
}
//check for child nodes
List<TaxonNode> allChildren = taxonNodeService.loadChildNodesOfTaxonNode(node, Arrays.asList("taxon"), true, true, null);
for (TaxonNode child : allChildren) {
- taxonNode = findTaxonNodeForDescription(child, description);
+ taxonNode = findTaxonNodeForDescription(child, specimen);
if(taxonNode!=null){
break;
}
import eu.etaxonomy.cdm.api.service.dto.GroupedTaxonDTO;
import eu.etaxonomy.cdm.api.service.dto.TaxonInContextDTO;
import eu.etaxonomy.cdm.api.service.pager.Pager;
+import eu.etaxonomy.cdm.exception.FilterException;
import eu.etaxonomy.cdm.exception.UnpublishedException;
import eu.etaxonomy.cdm.model.common.MarkerType;
import eu.etaxonomy.cdm.model.media.MediaRepresentation;
* If the <code>rank</code> is null the absolute root nodes will be returned.
* @param classification may be null for all classifications
+ * @param subtree filter on a taxonomic subtree
* @param rank the set to null for to get the root nodes of classifications
* @param includeUnpublished if <code>true</code> unpublished taxa are also exported
* @param pageSize The maximum number of relationships returned (can be null for all relationships)
* @param pageIndex The offset (in pageSize chunks) from the start of the result set (0 - based)
* @param propertyPaths
* @return
+ * @see #pageRankSpecificRootNodes(Classification, TaxonNode, Rank, boolean, Integer, Integer, List)
*
*/
- public List<TaxonNode> listRankSpecificRootNodes(Classification classification, Rank rank,
- boolean includeUnpublished, Integer pageSize, Integer pageIndex, List<String> propertyPaths);
+ public List<TaxonNode> listRankSpecificRootNodes(Classification classification, TaxonNode subtree,
+ Rank rank, boolean includeUnpublished, Integer pageSize, Integer pageIndex,
+ List<String> propertyPaths);
+
+
+ /**
+ * @see #listRankSpecificRootNodes(Classification, TaxonNode, Rank, boolean, Integer, Integer, List)
+ * @deprecated kept for compatibility with older versions; might be removed in versions >5.3
+ */
+ @Deprecated
+ public List<TaxonNode> listRankSpecificRootNodes(Classification classification,
+ Rank rank, boolean includeUnpublished, Integer pageSize, Integer pageIndex,
+ List<String> propertyPaths);
/**
* If the <code>rank</code> is null the absolute root nodes will be returned.
*
* @param classification may be null for all classifications
+ * @param subtree the taxonomic subtree filter
* @param rank the set to null for to get the root nodes of classifications
* @param includeUnpublished if <code>true</code> unpublished taxa are also exported
* @param pageSize The maximum number of relationships returned (can be null for all relationships)
* @param propertyPaths
* @return
*
+ * @see #listRankSpecificRootNodes(Classification, TaxonNode, Rank, boolean, Integer, Integer, List)
+ */
+ public Pager<TaxonNode> pageRankSpecificRootNodes(Classification classification, TaxonNode subtree,
+ Rank rank, boolean includeUnpublished, Integer pageSize, Integer pageIndex,
+ List<String> propertyPaths);
+ /**
+ * @see #pageRankSpecificRootNodes(Classification, TaxonNode, Rank, boolean, Integer, Integer, List)
+ * @deprecated kept for compatibility with older versions; might be removed in versions >5.3
*/
- public Pager<TaxonNode> pageRankSpecificRootNodes(Classification classification, Rank rank,
- boolean includeUnpublished, Integer pageSize, Integer pageIndex, List<String> propertyPaths);
+ @Deprecated
+ public Pager<TaxonNode> pageRankSpecificRootNodes(Classification classification,
+ Rank rank, boolean includeUnpublished, Integer pageSize, Integer pageIndex,
+ List<String> propertyPaths);
/**
* @param taxonNode
* @throws UnpublishedException
* if any of the taxa in the path is unpublished an {@link UnpublishedException} is thrown.
*/
+ public List<TaxonNode> loadTreeBranch(TaxonNode taxonNode, TaxonNode subtree, Rank baseRank, boolean includeUnpublished,
+ List<String> propertyPaths) throws UnpublishedException;
public List<TaxonNode> loadTreeBranch(TaxonNode taxonNode, Rank baseRank, boolean includeUnpublished,
List<String> propertyPaths) throws UnpublishedException;
* @throws UnpublishedException
* if any of the taxa in the path is unpublished an {@link UnpublishedException} is thrown
*/
- public List<TaxonNode> loadTreeBranchToTaxon(Taxon taxon, Classification classification, Rank baseRank,
+ public List<TaxonNode> loadTreeBranchToTaxon(Taxon taxon, Classification classification,
+ TaxonNode subtree, Rank baseRank,
+ boolean includeUnpublished, List<String> propertyPaths) throws UnpublishedException;
+ public List<TaxonNode> loadTreeBranchToTaxon(Taxon taxon, Classification classification,
+ Rank baseRank,
boolean includeUnpublished, List<String> propertyPaths) throws UnpublishedException;
public List<TaxonNode> listChildNodesOfTaxon(UUID taxonUuid, UUID classificationUuid, boolean includeUnpublished,
Integer pageSize, Integer pageIndex, List<String> propertyPaths);
+ public List<TaxonNode> listChildNodesOfTaxon(UUID taxonUuid, UUID classificationUuid, UUID subtreeUuid, boolean includeUnpublished,
+ Integer pageSize, Integer pageIndex, List<String> propertyPaths) throws FilterException;
+
+
/**
* @param taxonNode
* @param propertyPaths
* @return a Pager DescriptionElementBase instances
* @see <a href="http://lucene.apache.org/java/2_4_0/queryparsersyntax.html">Apache Lucene - Query Parser Syntax</a>
*/
- public Pager<DescriptionElementBase> searchElements(Class<? extends DescriptionElementBase> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
+ public <S extends DescriptionElementBase> Pager<S> searchElements(Class<S> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
/**
* Returns a List of Media that are associated with a given description element
public UUID monitGetRowWrapper(DescriptiveDataSet descriptiveDataSet);
/**
- * Loads all avaliable specimens wrapped in a {@link SpecimenNodeWrapper} object for
+ * Loads all available specimens wrapped in a {@link SpecimenNodeWrapper} object for
* a given {@link DescriptiveDataSet} according to the filters set in the working set
- * @param descriptiveDataSet the working set for which the specimens should be fetched
+ * @param descriptiveDataSet the data set for which the specimens should be fetched
* @return a collection of wrapper objects
*/
public Collection<SpecimenNodeWrapper> loadSpecimens(DescriptiveDataSet descriptiveDataSet);
+ /**
+ * Lists all taxon nodes that match the filter set defined in the
+ * {@link DescriptiveDataSet} given.
+ * @param descriptiveDataSet the data set which defines the taxon node filter
+ * @return a list of {@link UUID}s from the filtered nodes
+ *
+ */
+ public List<UUID> findFilteredTaxonNodes(DescriptiveDataSet descriptiveDataSet);
+
/**
* Creates a row wrapper object for the given description
* @param description the description for which the wrapper should be created
* @return
*/
public SpecimenDescription findDescriptionForDescriptiveDataSet(UUID descriptiveDataSetUuid, UUID specimenUuid);
+
+ /**
+ * Loads all taxon nodes that match the filter set defined in the
+ * {@link DescriptiveDataSet} given.
+ * @param descriptiveDataSet the data set which defines the taxon node filter
+ * @return a list of {@link TaxonNode}s from the filtered nodes
+ *
+ */
+ public List<TaxonNode> loadFilteredTaxonNodes(DescriptiveDataSet descriptiveDataSet, List<String> propertyPaths);
}
import eu.etaxonomy.cdm.model.common.MarkerType;
import eu.etaxonomy.cdm.model.media.Rights;
import eu.etaxonomy.cdm.persistence.dao.common.Restriction;
+import eu.etaxonomy.cdm.persistence.dao.initializer.IBeanInitializer;
import eu.etaxonomy.cdm.persistence.dto.UuidAndTitleCache;
import eu.etaxonomy.cdm.persistence.query.MatchMode;
import eu.etaxonomy.cdm.persistence.query.OrderHint;
import eu.etaxonomy.cdm.strategy.cache.common.IIdentifiableEntityCacheStrategy;
import eu.etaxonomy.cdm.strategy.match.IMatchStrategy;
+import eu.etaxonomy.cdm.strategy.match.IMatchable;
+import eu.etaxonomy.cdm.strategy.merge.IMergable;
import eu.etaxonomy.cdm.strategy.merge.IMergeStrategy;
public interface IIdentifiableEntityService<T extends IdentifiableEntity>
* authorTeam.persistentTitleCache
* @return a paged list of instances of type T matching the queryString
*/
- public Pager<T> findByTitle(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
+ public <S extends T> Pager<S> findByTitle(Class<S> clazz, String queryString, MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
/**
* authorTeam.persistentTitleCache
* @return a paged list of instances of type T matching the queryString
*/
- public Pager<T> findByTitleWithRestrictions(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
+ public <S extends T> Pager<S> findByTitleWithRestrictions(Class<S> clazz, String queryString,MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
/**
*
* @return a paged list of instances of type T matching the queryString
*/
- public Pager<T> findByTitle(IIdentifiableEntityServiceConfigurator<T> configurator);
+ public <S extends T> Pager<S> findByTitle(IIdentifiableEntityServiceConfigurator<S> configurator);
/**
* Return an Integer of how many objects matching the given query string, optionally filtered by class, optionally with a particular MatchMode
* authorTeam.persistentTitleCache
* @return a list of instances of type T matching the queryString
*/
- public List<T> listByTitle(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
+ public <S extends T> List<S> listByTitle(Class<S> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
/**
* Return a List of objects matching the given query string, optionally filtered by class, optionally with a particular MatchMode
* authorTeam.persistentTitleCache
* @return a list of instances of type T matching the queryString
*/
- public List<T> listByTitleWithRestrictions(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
+ public <S extends T> List<S> listByTitleWithRestrictions(Class<S> clazz, String queryString,MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
/**
* Return a List of objects matching the given query string, optionally filtered by class, optionally with a particular MatchMode
* authorTeam.persistentTitleCache
* @return a list of instances of type T matching the queryString
*/
- public List<T> listByReferenceTitle(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
+ public <S extends T> List<S> listByReferenceTitle(Class<S> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
/**
* Return a List of objects matching the given query string, optionally filtered by class, optionally with a particular MatchMode
* authorTeam.persistentTitleCache
* @return a list of instances of type T matching the queryString
*/
- public List<T> listByReferenceTitleWithRestrictions(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
+ public <S extends T> List<S> listByReferenceTitleWithRestrictions(Class<S> clazz, String queryString,MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
/**
* Returns a Paged List of IdentifiableEntity instances where the default field matches the String queryString (as interpreted by the Lucene QueryParser)
* @see <a href="http://lucene.apache.org/java/2_4_0/queryparsersyntax.html">Apache Lucene - Query Parser Syntax</a>
*/
public Pager<T> search(Class<? extends T> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
+// public <S extends T> Pager<S> search(Class<S> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths);
/**
* @param matchMode
* @return
*/
- public Pager<T> findTitleCache(Class<? extends T> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, MatchMode matchMode);
+ public <S extends T> Pager<S> findTitleCache(Class<S> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, MatchMode matchMode);
/**
*/\r
public MergeResult<T> merge(T newInstance, boolean returnTransientEntity);\r
\r
- public Pager<T> page(Class<? extends T> clazz, String param, String queryString, MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageIndex, List<OrderHint> orderHints, List<String> propertyPaths);\r
+ public <S extends T> Pager<S> page(Class<S> clazz, String param, String queryString, MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageIndex, List<OrderHint> orderHints, List<String> propertyPaths);\r
\r
- public Pager<T> pageByRestrictions(Class<? extends T> clazz, String param, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageIndex, List<OrderHint> orderHints,\r
+ public <S extends T> Pager<S> pageByRestrictions(Class<S> clazz, String param, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageIndex, List<OrderHint> orderHints,\r
List<String> propertyPaths);\r
\r
}
*/
public TaxonNodeDto findCommonParentDto(Collection<TaxonNodeDto> nodes);
+ /**
+ * @param taxonNodeUuid the UUID of the taxon node
+ * @return the DTO for the taxon node with the given UUID
+ */
+ TaxonNodeDto dto(UUID taxonNodeUuid);
+
}
*/
@Deprecated
public Pager<SearchResult<TaxonBase>> findByEverythingFullText(String queryString,
- Classification classification, boolean includeUnpublished, List<Language> languages, boolean highlightFragments,
+ Classification classification, TaxonNode subtree, boolean includeUnpublished, List<Language> languages, boolean highlightFragments,
Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) throws IOException, LuceneParseException, LuceneMultiSearchException;
/**
* @throws LuceneParseException
*/
public Pager<SearchResult<TaxonBase>> findByFullText(Class<? extends TaxonBase> clazz, String queryString,
- Classification classification, boolean includeUnpublished, List<Language> languages,
- boolean highlightFragments, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints,
- List<String> propertyPaths) throws IOException, LuceneParseException;
+ Classification classification, TaxonNode subtree,
+ boolean includeUnpublished, List<Language> languages,
+ boolean highlightFragments, Integer pageSize, Integer pageNumber,
+ List<OrderHint> orderHints, List<String> propertyPaths)
+ throws IOException, LuceneParseException;
/**
* @throws IOException
* @throws LuceneParseException
*/
+ //TODO needed? currently only used in test
public Pager<SearchResult<TaxonBase>> findByDistribution(List<NamedArea> areaFilter, List<PresenceAbsenceTerm> statusFilter,
- Classification classification,
+ Classification classification, TaxonNode subtree,
Integer pageSize, Integer pageNumber,
List<OrderHint> orderHints, List<String> propertyPaths) throws IOException, LuceneParseException;
* Additional filter criterion: If a taxonomic classification
* three is specified here the result set will only contain taxa
* of the given classification
+ * @param subtree
* @param namedAreas
+ * @param distributionStatus
* @param languages
* Additional filter criterion: Search only in these languages.
* Not all text fields in the cdm model are multilingual, thus
*/
public Pager<SearchResult<TaxonBase>> findTaxaAndNamesByFullText(
EnumSet<TaxaAndNamesSearchMode> searchModes,
- String queryString, Classification classification, Set<NamedArea> namedAreas, Set<PresenceAbsenceTerm> distributionStatus,
+ String queryString, Classification classification, TaxonNode subtree,
+ Set<NamedArea> namedAreas, Set<PresenceAbsenceTerm> distributionStatus,
List<Language> languages, boolean highlightFragments, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints,
List<String> propertyPaths) throws IOException, LuceneParseException, LuceneMultiSearchException;
* the query string to filter by
* @param classification
* Additional filter criterion: If a taxonomic classification
- * three is specified here the result set will only contain taxa
+ * tree is specified here the result set will only contain taxa
* of the given classification
+ * @param subtree
+ * Additional filter criterion: If a taxonomic classification
+ * subtree is specified here the result set will only contain taxa
+ * of the given subtree
* @param features
* TODO
* @param languages
* Additional filter criterion: Search only in these languages.
- * Not all text fields in the cdm model are multilingual, thus
- * this setting will only apply to the multilingiual fields.
+ * Not all text fields in the CDM model are multi-lingual, thus
+ * this setting will only apply to the multi-lingual fields.
* Other fields are searched nevertheless if this parameter is
* set or not.
* @param highlightFragments
* @throws LuceneCorruptIndexException
* @throws LuceneParseException
*/
- public Pager<SearchResult<TaxonBase>> findByDescriptionElementFullText(Class<? extends DescriptionElementBase> clazz, String queryString, Classification classification, List<Feature> features, List<Language> languages, boolean highlightFragments, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) throws IOException, LuceneParseException;
+ public Pager<SearchResult<TaxonBase>> findByDescriptionElementFullText(Class<? extends DescriptionElementBase> clazz,
+ String queryString, Classification classification, TaxonNode subtree, List<Feature> features, List<Language> languages, boolean highlightFragments, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) throws IOException, LuceneParseException;
/**
* Lists all Media found in an any TaxonDescription associated with this
@Transactional(readOnly = true)
@Override
- public Pager<T> findByTitle(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+ public <S extends T> Pager<S> findByTitle(Class<S> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
long numberOfResults = dao.countByTitle(clazz, queryString, matchmode, criteria);
- List<T> results = new ArrayList<>();
+ List<S> results = new ArrayList<>();
if(numberOfResults > 0) { // no point checking again //TODO use AbstractPagerImpl.hasResultsInRange(numberOfResults, pageNumber, pageSize)
results = dao.findByTitle(clazz, queryString, matchmode, criteria, pageSize, pageNumber, orderHints, propertyPaths);
}
@Transactional(readOnly = true)
@Override
- public Pager<T> findByTitleWithRestrictions(Class<? extends T> clazz, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+ public <S extends T> Pager<S> findByTitleWithRestrictions(Class<S> clazz, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
long numberOfResults = dao.countByTitleWithRestrictions(clazz, queryString, matchmode, restrictions);
- List<T> results = new ArrayList<>();
+ List<S> results = new ArrayList<>();
if(numberOfResults > 0) { // no point checking again //TODO use AbstractPagerImpl.hasResultsInRange(numberOfResults, pageNumber, pageSize)
results = dao.findByTitleWithRestrictions(clazz, queryString, matchmode, restrictions, pageSize, pageNumber, orderHints, propertyPaths);
}
@Transactional(readOnly = true)
@Override
- public Pager<T> findByTitle(IIdentifiableEntityServiceConfigurator<T> config){
+ public <S extends T> Pager<S> findByTitle(IIdentifiableEntityServiceConfigurator<S> config){
boolean withRestrictions = config.getRestrictions() != null && !config.getRestrictions().isEmpty();
boolean withCriteria = config.getCriteria() != null && !config.getCriteria().isEmpty();
if(withCriteria && withRestrictions){
throw new RuntimeException("Restrictions and Criteria can not be used at the same time");
} else if(withRestrictions){
- return findByTitleWithRestrictions(config.getClazz(), config.getTitleSearchStringSqlized(), config.getMatchMode(), config.getRestrictions(), config.getPageSize(), config.getPageNumber(), config.getOrderHints(), config.getPropertyPaths());
+ return findByTitleWithRestrictions((Class<S>)config.getClazz(), config.getTitleSearchStringSqlized(), config.getMatchMode(), config.getRestrictions(), config.getPageSize(), config.getPageNumber(), config.getOrderHints(), config.getPropertyPaths());
} else {
- return findByTitle(config.getClazz(), config.getTitleSearchStringSqlized(), config.getMatchMode(), config.getCriteria(), config.getPageSize(), config.getPageNumber(), config.getOrderHints(), config.getPropertyPaths());
+ return findByTitle((Class<S>) config.getClazz(), config.getTitleSearchStringSqlized(), config.getMatchMode(), config.getCriteria(), config.getPageSize(), config.getPageNumber(), config.getOrderHints(), config.getPropertyPaths());
}
}
@Transactional(readOnly = true)
@Override
- public List<T> listByTitle(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+ public <S extends T> List<S> listByTitle(Class<S> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
long numberOfResults = dao.countByTitle(clazz, queryString, matchmode, criteria);
- List<T> results = new ArrayList<>();
+ List<S> results = new ArrayList<>();
if(numberOfResults > 0) { // no point checking again //TODO use AbstractPagerImpl.hasResultsInRange(numberOfResults, pageNumber, pageSize)
results = dao.findByTitle(clazz, queryString, matchmode, criteria, pageSize, pageNumber, orderHints, propertyPaths);
}
@Transactional(readOnly = true)
@Override
- public List<T> listByTitleWithRestrictions(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+ public <S extends T> List<S> listByTitleWithRestrictions(Class<S> clazz, String queryString,MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
long numberOfResults = dao.countByTitleWithRestrictions(clazz, queryString, matchmode, restrictions);
- List<T> results = new ArrayList<>();
+ List<S> results = new ArrayList<>();
if(numberOfResults > 0) { // no point checking again //TODO use AbstractPagerImpl.hasResultsInRange(numberOfResults, pageNumber, pageSize)
results = dao.findByTitleWithRestrictions(clazz, queryString, matchmode, restrictions, pageSize, pageNumber, orderHints, propertyPaths);
}
@Transactional(readOnly = true)
@Override
- public Pager<T> findTitleCache(Class<? extends T> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, MatchMode matchMode){
+ public <S extends T> Pager<S> findTitleCache(Class<S> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, MatchMode matchMode){
long numberOfResults = dao.countTitleCache(clazz, queryString, matchMode);
- List<T> results = new ArrayList<>();
+ List<S> results = new ArrayList<>();
if(numberOfResults > 0) { // no point checking again //TODO use AbstractPagerImpl.hasResultsInRange(numberOfResults, pageNumber, pageSize)
results = dao.findTitleCache(clazz, queryString, pageSize, pageNumber, orderHints, matchMode);
}
@Transactional(readOnly = true)
@Override
- public List<T> listByReferenceTitle(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+ public <S extends T> List<S> listByReferenceTitle(Class<S> clazz, String queryString,MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
long numberOfResults = dao.countByReferenceTitle(clazz, queryString, matchmode, criteria);
- List<T> results = new ArrayList<>();
+ List<S> results = new ArrayList<>();
if(numberOfResults > 0) { // no point checking again //TODO use AbstractPagerImpl.hasResultsInRange(numberOfResults, pageNumber, pageSize)
results = dao.findByReferenceTitle(clazz, queryString, matchmode, criteria, pageSize, pageNumber, orderHints, propertyPaths);
}
@Transactional(readOnly = true)
@Override
- public List<T> listByReferenceTitleWithRestrictions(Class<? extends T> clazz, String queryString,MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+ public <S extends T> List<S> listByReferenceTitleWithRestrictions(Class<S> clazz, String queryString,MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
long numberOfResults = dao.countByReferenceTitleWithRestrictions(clazz, queryString, matchmode, restrictions);
- List<T> results = new ArrayList<>();
+ List<S> results = new ArrayList<>();
if(numberOfResults > 0) { // no point checking again //TODO use AbstractPagerImpl.hasResultsInRange(numberOfResults, pageNumber, pageSize)
results = dao.findByReferenceTitleWithRestrictions(clazz, queryString, matchmode, restrictions, pageSize, pageNumber, orderHints, propertyPaths);
}
@Transactional(readOnly = true)
@Override
public Pager<T> search(Class<? extends T> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+// public <S extends T> Pager<S> search(Class<S> clazz, String queryString, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
long numberOfResults = dao.count(clazz,queryString);
List<T> results = new ArrayList<>();
while (! dedupState.isCompleted){
//get x page sizes
- List<T> objectList = getPages(clazz, dedupState, orderHints);
+ List<? extends T> objectList = getPages(clazz, dedupState, orderHints);
//after each page check if any changes took place
int nUnEqualPages = handleAllPages(objectList, dedupState, nextGroup, matchStrategy, mergeStrategy);
nUnEqualPages = nUnEqualPages + dedupState.pageSize * dedupState.startPage;
}
- private int handleAllPages(List<T> objectList, DeduplicateState dedupState, List<T> nextGroup, IMatchStrategy matchStrategy, IMergeStrategy mergeStrategy) {
+ private int handleAllPages(List<? extends T> objectList, DeduplicateState dedupState, List<T> nextGroup, IMatchStrategy matchStrategy, IMergeStrategy mergeStrategy) {
int nUnEqual = 0;
for (T object : objectList){
String currentTitleCache = object.getTitleCache();
return nUnEqual;
}
- private List<T> getPages(Class<? extends T> clazz, DeduplicateState dedupState, List<OrderHint> orderHints) {
- List<T> result = new ArrayList<>();
+ private <S extends T> List<S> getPages(Class<S> clazz, DeduplicateState dedupState, List<OrderHint> orderHints) {
+ List<S> result = new ArrayList<>();
for (int pageNo = dedupState.startPage; pageNo < dedupState.startPage + dedupState.nPages; pageNo++){
- List<T> objectList = listByTitleWithRestrictions(clazz, null, null, null, dedupState.pageSize, pageNo, orderHints, null);
+ List<S> objectList = listByTitleWithRestrictions(clazz, null, null, null, dedupState.pageSize, pageNo, orderHints, null);
result.addAll(objectList);
}
if (result.size()< dedupState.nPages * dedupState.pageSize ){
specimenIdentifier = CdmFormatterFactory.format(derivedUnit, new FormatKey[] {
collectionKey, FormatKey.SPACE,
FormatKey.MOST_SIGNIFICANT_IDENTIFIER, FormatKey.SPACE });
+ if(CdmUtils.isBlank(specimenIdentifier)){
+ specimenIdentifier = derivedUnit.getTitleCache();
+ }
if(CdmUtils.isBlank(specimenIdentifier)){
specimenIdentifier = derivedUnit.getUuid().toString();
}
fieldUnitDTO.setHasType(true);
}
TypeDesignationStatusBase<?> typeStatus = specimenTypeDesignation.getTypeStatus();
- if (typeStatus != null) {
- List<String> typedTaxaNames = new ArrayList<>();
- String label = typeStatus.getLabel();
- Set<TaxonName> typifiedNames = specimenTypeDesignation.getTypifiedNames();
- for (TaxonName taxonName : typifiedNames) {
- typedTaxaNames.add(taxonName.getNameCache());
- }
- preservedSpecimenDTO.addTypes(label, typedTaxaNames);
+ Set<TaxonName> typifiedNames = specimenTypeDesignation.getTypifiedNames();
+ List<String> typedTaxaNames = new ArrayList<>();
+ for (TaxonName taxonName : typifiedNames) {
+ typedTaxaNames.add(taxonName.getTitleCache());
}
+ preservedSpecimenDTO.addTypes(typeStatus!=null?typeStatus.getLabel():"", typedTaxaNames);
}
// individuals associations
if (derivative instanceof DnaSample) {
dto = new DNASampleDTO(derivative);
} else {
- dto = PreservedSpecimenDTO.newInstance(derivative);
+ dto = new PreservedSpecimenDTO(derivative);
}
alreadyCollectedSpecimen.put(dto.getUuid(), dto);
dto.addAllDerivates(getDerivedUnitDTOsFor(dto, derivative, alreadyCollectedSpecimen));
derivedUnitDTO = new DNASampleDTO(derivedUnit);
} else {
derivedUnit = HibernateProxyHelper.deproxy(o, DerivedUnit.class);
- derivedUnitDTO = PreservedSpecimenDTO.newInstance(derivedUnit);
+ derivedUnitDTO = new PreservedSpecimenDTO(derivedUnit);
}
if (alreadyCollectedSpecimen.get(derivedUnitDTO.getUuid()) == null){
alreadyCollectedSpecimen.put(derivedUnitDTO.getUuid(), derivedUnitDTO);
if (specimen instanceof DnaSample){
originalDTO = new DNASampleDTO((DnaSample)specimen);
} else {
- originalDTO = PreservedSpecimenDTO.newInstance((DerivedUnit)specimen);
+ originalDTO = new PreservedSpecimenDTO((DerivedUnit)specimen);
}
originalDTO.addDerivate(derivedUnitDTO);
fieldUnitDto = findFieldUnitDTO(originalDTO, fieldUnits, alreadyCollectedSpecimen);
}
@Override
- public Pager<SpecimenOrObservationBase> findByTitle(
- IIdentifiableEntityServiceConfigurator<SpecimenOrObservationBase> config) {
+ public <S extends SpecimenOrObservationBase> Pager<S> findByTitle(
+ IIdentifiableEntityServiceConfigurator<S> config) {
if (config instanceof FindOccurrencesConfigurator) {
FindOccurrencesConfigurator occurrenceConfig = (FindOccurrencesConfigurator) config;
List<SpecimenOrObservationBase> occurrences = new ArrayList<>();
if(occurrenceConfig.getAssociatedTaxonNameUuid()!=null){
taxonName = nameService.load(occurrenceConfig.getAssociatedTaxonNameUuid());
}
- occurrences.addAll(dao.findOccurrences(occurrenceConfig.getClazz(),
+ List<? extends SpecimenOrObservationBase> foundOccurrences = dao.findOccurrences(occurrenceConfig.getClazz(),
occurrenceConfig.getTitleSearchString(), occurrenceConfig.getSignificantIdentifier(),
occurrenceConfig.getSpecimenType(), taxon, taxonName, occurrenceConfig.getMatchMode(), null, null,
- occurrenceConfig.getOrderHints(), occurrenceConfig.getPropertyPaths()));
+ occurrenceConfig.getOrderHints(), occurrenceConfig.getPropertyPaths());
+ occurrences.addAll(foundOccurrences);
occurrences = filterOccurencesByAssignmentAndHierarchy(occurrenceConfig, occurrences, taxon, taxonName);
- return new DefaultPagerImpl<>(config.getPageNumber(), occurrences.size(), config.getPageSize(), occurrences);
+ return new DefaultPagerImpl<>(config.getPageNumber(), occurrences.size(), config.getPageSize(), (List<S>)occurrences);
}
return super.findByTitle(config);
}
//filter out (un-)assigned specimens
if(taxon==null && taxonName==null){
AssignmentStatus assignmentStatus = occurrenceConfig.getAssignmentStatus();
- List<SpecimenOrObservationBase<?>> specimenWithAssociations = new ArrayList<>();
+ List<SpecimenOrObservationBase> specimenWithAssociations = new ArrayList<>();
if(!assignmentStatus.equals(AssignmentStatus.ALL_SPECIMENS)){
- for (SpecimenOrObservationBase<?> specimenOrObservationBase : occurrences) {
+ for (SpecimenOrObservationBase specimenOrObservationBase : occurrences) {
boolean includeUnpublished = true; //TODO not sure if this is correct, maybe we have to propagate publish flag to higher methods.
Collection<TaxonBase<?>> associatedTaxa = listAssociatedTaxa(specimenOrObservationBase,
includeUnpublished, null, null, null, null);
\r
@Override\r
@Transactional(readOnly = true)\r
- public Pager<T> page(Class<? extends T> clazz, String param, String queryString, MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageIndex, List<OrderHint> orderHints, List<String> propertyPaths){\r
+ public <S extends T> Pager<S> page(Class<S> clazz, String param, String queryString, MatchMode matchmode, List<Criterion> criteria, Integer pageSize, Integer pageIndex, List<OrderHint> orderHints, List<String> propertyPaths){\r
\r
- List<T> records;\r
+ List<S> records;\r
long resultSize = dao.countByParam(clazz, param, queryString, matchmode, criteria);\r
if(AbstractPagerImpl.hasResultsInRange(resultSize, pageIndex, pageSize)){\r
records = dao.findByParam(clazz, param, queryString, matchmode, criteria, pageSize, pageIndex, orderHints, propertyPaths);\r
} else {\r
records = new ArrayList<>();\r
}\r
- Pager<T> pager = new DefaultPagerImpl<>(pageIndex, resultSize, pageSize, records);\r
- return pager;\r
+ return new DefaultPagerImpl<>(pageIndex, resultSize, pageSize, records);\r
}\r
\r
\r
@Override\r
@Transactional(readOnly = true)\r
- public Pager<T> pageByRestrictions(Class<? extends T> clazz, String param, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageIndex, List<OrderHint> orderHints, List<String> propertyPaths){\r
+ public <S extends T> Pager<S> pageByRestrictions(Class<S> clazz, String param, String queryString, MatchMode matchmode, List<Restriction<?>> restrictions, Integer pageSize, Integer pageIndex, List<OrderHint> orderHints, List<String> propertyPaths){\r
\r
- List<T> records;\r
+ List<S> records;\r
long resultSize = dao.countByParamWithRestrictions(clazz, param, queryString, matchmode, restrictions);\r
if(AbstractPagerImpl.hasResultsInRange(resultSize, pageIndex, pageSize)){\r
records = dao.findByParamWithRestrictions(clazz, param, queryString, matchmode, restrictions, pageSize, pageIndex, orderHints, propertyPaths);\r
} else {\r
records = new ArrayList<>();\r
}\r
- Pager<T> pager = new DefaultPagerImpl<>(pageIndex, resultSize, pageSize, records);\r
+ Pager<S> pager = new DefaultPagerImpl<>(pageIndex, resultSize, pageSize, records);\r
return pager;\r
}\r
\r
return null;
}
+ /**
+ * Loads the {@link TaxonNode} for the given UUID and wraps it in a
+ * {@link TaxonNodeDto}. Returns null for a node without a parent
+ * (i.e. the root node of a classification).
+ *
+ * NOTE(review): dao.load(taxonNodeUuid) may return null for an unknown
+ * UUID, in which case taxonNode.getParent() would throw a
+ * NullPointerException — confirm whether a null check is needed here.
+ *
+ * @param taxonNodeUuid the UUID of the taxon node to load
+ * @return the DTO, or null if the node has no parent
+ */
+ @Override
+ public TaxonNodeDto dto(UUID taxonNodeUuid) {
+ TaxonNode taxonNode = dao.load(taxonNodeUuid);
+ if(taxonNode.getParent() != null) {
+ return new TaxonNodeDto(taxonNode);
+ }
+ return null;
+ }
+
@Override
@Autowired
protected void setDao(ITaxonNodeDao dao) {
import javax.persistence.EntityNotFoundException;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import org.apache.lucene.queryparser.classic.ParseException;
import org.apache.lucene.search.BooleanClause.Occur;
}
// filter by includeRelationships
for (TaxonRelationshipEdge relationshipEdgeFilter : includeRelationships) {
- if ( relationshipEdgeFilter.getTaxonRelationshipTypes().equals(taxRel.getType()) ) {
+ if ( relationshipEdgeFilter.getRelationshipTypes().equals(taxRel.getType()) ) {
if (relationshipEdgeFilter.getDirections().contains(Direction.relatedTo) && !taxa.contains(taxRel.getToTaxon())) {
if(logger.isDebugEnabled()){
logger.debug(maxDepth + ": " + taxon.getTitleCache() + " --[" + taxRel.getType().getLabel() + "]--> " + taxRel.getToTaxon().getTitleCache());
if (config.isDoSynonyms() || config.isDoTaxa() || config.isDoNamesWithoutTaxa() || config.isDoTaxaByCommonNames()){
return dao.getTaxaByNameForEditor(config.isDoTaxa(), config.isDoSynonyms(), config.isDoNamesWithoutTaxa(),
config.isDoMisappliedNames(), config.isDoTaxaByCommonNames(), config.isIncludeUnpublished(),
- config.getTitleSearchStringSqlized(), config.getClassification(), config.getMatchMode(), config.getNamedAreas(), config.getOrder());
+ config.getTitleSearchStringSqlized(), config.getClassification(), config.getSubtree(),
+ config.getMatchMode(), config.getNamedAreas(), config.getOrder());
}else{
return new ArrayList<>();
}
// Taxa and synonyms
long numberTaxaResults = 0L;
-
List<String> propertyPath = new ArrayList<>();
if(configurator.getTaxonPropertyPath() != null){
propertyPath.addAll(configurator.getTaxonPropertyPath());
}
-
- if (configurator.isDoMisappliedNames() || configurator.isDoSynonyms() || configurator.isDoTaxa() || configurator.isDoTaxaByCommonNames()){
+ if (configurator.isDoMisappliedNames() || configurator.isDoSynonyms() || configurator.isDoTaxa() || configurator.isDoTaxaByCommonNames()){
if(configurator.getPageSize() != null){ // no point counting if we need all anyway
numberTaxaResults =
dao.countTaxaByName(configurator.isDoTaxa(),configurator.isDoSynonyms(), configurator.isDoMisappliedNames(),
configurator.isDoTaxaByCommonNames(), configurator.isDoIncludeAuthors(), configurator.getTitleSearchStringSqlized(),
- configurator.getClassification(), configurator.getMatchMode(),
+ configurator.getClassification(), configurator.getSubtree(), configurator.getMatchMode(),
configurator.getNamedAreas(), configurator.isIncludeUnpublished());
}
if(configurator.getPageSize() == null || numberTaxaResults > configurator.getPageSize() * configurator.getPageNumber()){ // no point checking again if less results
taxa = dao.getTaxaByName(configurator.isDoTaxa(), configurator.isDoSynonyms(),
configurator.isDoMisappliedNames(), configurator.isDoTaxaByCommonNames(), configurator.isDoIncludeAuthors(),
- configurator.getTitleSearchStringSqlized(), configurator.getClassification(),
+ configurator.getTitleSearchStringSqlized(), configurator.getClassification(), configurator.getSubtree(),
configurator.getMatchMode(), configurator.getNamedAreas(), configurator.isIncludeUnpublished(),
configurator.getOrder(), configurator.getPageSize(), configurator.getPageNumber(), propertyPath);
}
try{
// 1. search for accepted taxa
List<TaxonBase> taxonList = dao.findByNameTitleCache(true, false, config.isIncludeUnpublished(),
- config.getTaxonNameTitle(), null, MatchMode.EXACT, null, null, 0, null, null);
+ config.getTaxonNameTitle(), null, null, MatchMode.EXACT, null, null, 0, null, null);
boolean bestCandidateMatchesSecUuid = false;
boolean bestCandidateIsInClassification = false;
int countEqualCandidates = 0;
- for(TaxonBase taxonBaseCandidate : taxonList){
+ for(TaxonBase<?> taxonBaseCandidate : taxonList){
if(taxonBaseCandidate instanceof Taxon){
Taxon newCanditate = CdmBase.deproxy(taxonBaseCandidate, Taxon.class);
boolean newCandidateMatchesSecUuid = isMatchesSecUuid(newCanditate, config);
// 2. search for synonyms
if (config.isIncludeSynonyms()){
List<TaxonBase> synonymList = dao.findByNameTitleCache(false, true, config.isIncludeUnpublished(),
- config.getTaxonNameTitle(), null, MatchMode.EXACT, null, null, 0, null, null);
+ config.getTaxonNameTitle(), null, null, MatchMode.EXACT, null, null, 0, null, null);
for(TaxonBase taxonBase : synonymList){
if(taxonBase instanceof Synonym){
Synonym synonym = CdmBase.deproxy(taxonBase, Synonym.class);
@Override
public Synonym findBestMatchingSynonym(String taxonName, boolean includeUnpublished) {
- List<TaxonBase> synonymList = dao.findByNameTitleCache(false, true, includeUnpublished, taxonName, null, MatchMode.EXACT, null, null, 0, null, null);
+ List<TaxonBase> synonymList = dao.findByNameTitleCache(false, true, includeUnpublished, taxonName, null, null, MatchMode.EXACT, null, null, 0, null, null);
if(! synonymList.isEmpty()){
Synonym result = CdmBase.deproxy(synonymList.iterator().next(), Synonym.class);
if(synonymList.size() == 1){
@Override
public Pager<SearchResult<TaxonBase>> findByFullText(
Class<? extends TaxonBase> clazz, String queryString,
- Classification classification, boolean includeUnpublished, List<Language> languages,
+ Classification classification, TaxonNode subtree, boolean includeUnpublished, List<Language> languages,
boolean highlightFragments, Integer pageSize, Integer pageNumber,
List<OrderHint> orderHints, List<String> propertyPaths) throws IOException, LuceneParseException {
- LuceneSearch luceneSearch = prepareFindByFullTextSearch(clazz, queryString, classification, null,
- includeUnpublished, languages, highlightFragments, null);
+ LuceneSearch luceneSearch = prepareFindByFullTextSearch(clazz, queryString, classification, subtree,
+ null, includeUnpublished, languages, highlightFragments, null);
// --- execute search
TopGroups<BytesRef> topDocsResultSet;
@Override
public Pager<SearchResult<TaxonBase>> findByDistribution(List<NamedArea> areaFilter, List<PresenceAbsenceTerm> statusFilter,
- Classification classification,
+ Classification classification, TaxonNode subtree,
Integer pageSize, Integer pageNumber,
List<OrderHint> orderHints, List<String> propertyPaths) throws IOException, LuceneParseException {
- LuceneSearch luceneSearch = prepareByDistributionSearch(areaFilter, statusFilter, classification);
+ LuceneSearch luceneSearch = prepareByDistributionSearch(areaFilter, statusFilter, classification, subtree);
// --- execute search
TopGroups<BytesRef> topDocsResultSet;
* @return
*/
protected LuceneSearch prepareFindByFullTextSearch(Class<? extends CdmBase> clazz, String queryString,
- Classification classification, String className, boolean includeUnpublished, List<Language> languages,
+ Classification classification, TaxonNode subtree, String className, boolean includeUnpublished, List<Language> languages,
boolean highlightFragments, SortField[] sortFields) {
Builder finalQueryBuilder = new Builder();
// ---- search criteria
luceneSearch.setCdmTypRestriction(clazz);
- if(!queryString.isEmpty() && !queryString.equals("*") && !queryString.equals("?") ) {
+ if(!StringUtils.isEmpty(queryString) && !queryString.equals("*") && !queryString.equals("?") ) {
textQueryBuilder.add(taxonBaseQueryFactory.newTermQuery("titleCache", queryString), Occur.SHOULD);
textQueryBuilder.add(taxonBaseQueryFactory.newDefinedTermQuery("name.rank", queryString, languages), Occur.SHOULD);
}
finalQueryBuilder.add(textQuery, Occur.MUST);
}
-
if(classification != null){
- finalQueryBuilder.add(taxonBaseQueryFactory.newEntityIdQuery("taxonNodes.classification.id", classification), Occur.MUST);
+ finalQueryBuilder.add(taxonBaseQueryFactory.newEntityIdQuery(AcceptedTaxonBridge.DOC_KEY_CLASSIFICATION_ID, classification), Occur.MUST);
+ }
+ if(subtree != null){
+ finalQueryBuilder.add(taxonBaseQueryFactory.newTermQuery(AcceptedTaxonBridge.DOC_KEY_TREEINDEX, subtree.treeIndexWc(), true), Occur.MUST);
}
if(!includeUnpublished) {
String accPublishParam = TaxonBase.ACC_TAXON_BRIDGE_PREFIX + AcceptedTaxonBridge.DOC_KEY_PUBLISH_SUFFIX;
* @throws IOException
*/
protected LuceneSearch prepareFindByTaxonRelationFullTextSearch(TaxonRelationshipEdge edge, String queryString,
- Classification classification, boolean includeUnpublished, List<Language> languages,
+ Classification classification, TaxonNode subtree, boolean includeUnpublished, List<Language> languages,
boolean highlightFragments, SortField[] sortFields) throws IOException {
String fromField;
QueryFactory taxonBaseQueryFactory = luceneIndexToolProvider.newQueryFactoryFor(TaxonBase.class);
Builder joinFromQueryBuilder = new Builder();
- joinFromQueryBuilder.add(taxonBaseQueryFactory.newTermQuery(queryTermField, queryString), Occur.MUST);
- joinFromQueryBuilder.add(taxonBaseQueryFactory.newEntityIdsQuery("type.id", edge.getTaxonRelationshipTypes()), Occur.MUST);
+ if(!StringUtils.isEmpty(queryString)){
+ joinFromQueryBuilder.add(taxonBaseQueryFactory.newTermQuery(queryTermField, queryString), Occur.MUST);
+ }
+ joinFromQueryBuilder.add(taxonBaseQueryFactory.newEntityIdsQuery("type.id", edge.getRelationshipTypes()), Occur.MUST);
if(!includeUnpublished){
joinFromQueryBuilder.add(taxonBaseQueryFactory.newBooleanQuery(publishField, true), Occur.MUST);
joinFromQueryBuilder.add(taxonBaseQueryFactory.newBooleanQuery(publishFieldInvers, true), Occur.MUST);
finalQueryBuilder.add(joinQuery, Occur.MUST);
if(classification != null){
- finalQueryBuilder.add(taxonBaseQueryFactory.newEntityIdQuery("taxonNodes.classification.id", classification), Occur.MUST);
+ finalQueryBuilder.add(taxonBaseQueryFactory.newEntityIdQuery(AcceptedTaxonBridge.DOC_KEY_CLASSIFICATION_ID, classification), Occur.MUST);
+ }
+ if(subtree != null){
+ finalQueryBuilder.add(taxonBaseQueryFactory.newTermQuery(AcceptedTaxonBridge.DOC_KEY_TREEINDEX, subtree.treeIndexWc(), true), Occur.MUST);
}
luceneSearch.setQuery(finalQueryBuilder.build());
@Override
public Pager<SearchResult<TaxonBase>> findTaxaAndNamesByFullText(
- EnumSet<TaxaAndNamesSearchMode> searchModes, String queryString, Classification classification,
+ EnumSet<TaxaAndNamesSearchMode> searchModes, String queryString,
+ Classification classification, TaxonNode subtree,
Set<NamedArea> namedAreas, Set<PresenceAbsenceTerm> distributionStatus, List<Language> languages,
boolean highlightFragments, Integer pageSize,
Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths)
// convert sets to lists
List<NamedArea> namedAreaList = null;
- List<PresenceAbsenceTerm>distributionStatusList = null;
+ List<PresenceAbsenceTerm> distributionStatusList = null;
if(namedAreas != null){
namedAreaList = new ArrayList<>(namedAreas.size());
namedAreaList.addAll(namedAreas);
} else if (!searchModes.contains(TaxaAndNamesSearchMode.doTaxa) && searchModes.contains(TaxaAndNamesSearchMode.doSynonyms)) {
className = "eu.etaxonomy.cdm.model.taxon.Synonym";
}
- luceneSearches.add(prepareFindByFullTextSearch(taxonBaseSubclass, queryString, classification, className,
+ luceneSearches.add(prepareFindByFullTextSearch(taxonBaseSubclass,
+ queryString, classification, subtree, className,
includeUnpublished, languages, highlightFragments, sortFields));
idFieldMap.put(CdmBaseType.TAXON, "id");
/* A) does not work!!!!
"inDescription.taxon.id",
true,
QueryFactory.addTypeRestriction(
- createByDescriptionElementFullTextQuery(queryString, classification, null, languages, descriptionElementQueryFactory)
+ createByDescriptionElementFullTextQuery(queryString, classification, subtree, null, languages, descriptionElementQueryFactory)
, CommonTaxonName.class
).build(), "id", null, ScoreMode.Max);
if (logger.isDebugEnabled()){logger.debug("byCommonNameJoinQuery: " + byCommonNameJoinQuery.toString());}
luceneSearches.add(prepareFindByTaxonRelationFullTextSearch(
new TaxonRelationshipEdge(relTypes, Direction.relatedTo),
- queryString, classification, includeUnpublished, languages, highlightFragments, sortFields));
+ queryString, classification, subtree, includeUnpublished, languages, highlightFragments, sortFields));
idFieldMap.put(CdmBaseType.TAXON, "id");
if(addDistributionFilter){
luceneSearches.add(prepareFindByTaxonRelationFullTextSearch(
new TaxonRelationshipEdge(relTypes, Direction.relatedTo),
- queryString, classification, includeUnpublished, languages, highlightFragments, sortFields));
+ queryString, classification, subtree, includeUnpublished, languages, highlightFragments, sortFields));
idFieldMap.put(CdmBaseType.TAXON, "id");
if(addDistributionFilter){
*/
protected LuceneSearch prepareByDistributionSearch(
List<NamedArea> namedAreaList, List<PresenceAbsenceTerm> distributionStatusList,
- Classification classification) throws IOException {
+ Classification classification, TaxonNode subtree) throws IOException {
Builder finalQueryBuilder = new Builder();
finalQueryBuilder.add(byAreaQuery, Occur.MUST);
if(classification != null){
- finalQueryBuilder.add(taxonQueryFactory.newEntityIdQuery("taxonNodes.classification.id", classification), Occur.MUST);
+ finalQueryBuilder.add(taxonQueryFactory.newEntityIdQuery(AcceptedTaxonBridge.DOC_KEY_CLASSIFICATION_ID, classification), Occur.MUST);
+ }
+ if(subtree != null){
+ finalQueryBuilder.add(taxonQueryFactory.newTermQuery(AcceptedTaxonBridge.DOC_KEY_TREEINDEX, subtree.treeIndexWc(), true), Occur.MUST);
}
BooleanQuery finalQuery = finalQueryBuilder.build();
logger.info("prepareByAreaSearch() query: " + finalQuery.toString());
@Override
public Pager<SearchResult<TaxonBase>> findByDescriptionElementFullText(
Class<? extends DescriptionElementBase> clazz, String queryString,
- Classification classification, List<Feature> features, List<Language> languages,
+ Classification classification, TaxonNode subtree, List<Feature> features, List<Language> languages,
boolean highlightFragments, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) throws IOException, LuceneParseException {
- LuceneSearch luceneSearch = prepareByDescriptionElementFullTextSearch(clazz, queryString, classification, features, languages, highlightFragments);
+ LuceneSearch luceneSearch = prepareByDescriptionElementFullTextSearch(clazz, queryString, classification, subtree, features, languages, highlightFragments);
// --- execute search
TopGroups<BytesRef> topDocsResultSet;
@Override
public Pager<SearchResult<TaxonBase>> findByEverythingFullText(String queryString,
- Classification classification, boolean includeUnpublished, List<Language> languages, boolean highlightFragments,
+ Classification classification, TaxonNode subtree, boolean includeUnpublished, List<Language> languages, boolean highlightFragments,
Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) throws IOException, LuceneParseException, LuceneMultiSearchException {
- LuceneSearch luceneSearchByDescriptionElement = prepareByDescriptionElementFullTextSearch(null, queryString, classification,
+ LuceneSearch luceneSearchByDescriptionElement = prepareByDescriptionElementFullTextSearch(null, queryString,
+ classification, subtree,
null, languages, highlightFragments);
- LuceneSearch luceneSearchByTaxonBase = prepareFindByFullTextSearch(null, queryString, classification, null,
+ LuceneSearch luceneSearchByTaxonBase = prepareFindByFullTextSearch(null, queryString, classification, subtree, null,
includeUnpublished, languages, highlightFragments, null);
LuceneMultiSearch multiSearch = new LuceneMultiSearch(luceneIndexToolProvider, luceneSearchByDescriptionElement, luceneSearchByTaxonBase);
* @return
*/
protected LuceneSearch prepareByDescriptionElementFullTextSearch(Class<? extends CdmBase> clazz,
- String queryString, Classification classification, List<Feature> features,
+ String queryString, Classification classification, TaxonNode subtree, List<Feature> features,
List<Language> languages, boolean highlightFragments) {
LuceneSearch luceneSearch = new LuceneSearch(luceneIndexToolProvider, GroupByTaxonClassBridge.GROUPBY_TAXON_FIELD, DescriptionElementBase.class);
SortField[] sortFields = new SortField[]{SortField.FIELD_SCORE, new SortField("inDescription.taxon.titleCache__sort", SortField.Type.STRING, false)};
- BooleanQuery finalQuery = createByDescriptionElementFullTextQuery(queryString, classification, features,
+ BooleanQuery finalQuery = createByDescriptionElementFullTextQuery(queryString, classification, subtree, features,
languages, descriptionElementQueryFactory);
luceneSearch.setSortFields(sortFields);
* @param descriptionElementQueryFactory
* @return
*/
- private BooleanQuery createByDescriptionElementFullTextQuery(String queryString, Classification classification,
- List<Feature> features, List<Language> languages, QueryFactory descriptionElementQueryFactory) {
+ private BooleanQuery createByDescriptionElementFullTextQuery(String queryString,
+ Classification classification, TaxonNode subtree, List<Feature> features,
+ List<Language> languages, QueryFactory descriptionElementQueryFactory) {
+
Builder finalQueryBuilder = new Builder();
Builder textQueryBuilder = new Builder();
- textQueryBuilder.add(descriptionElementQueryFactory.newTermQuery("titleCache", queryString), Occur.SHOULD);
- // common name
- Builder nameQueryBuilder = new Builder();
- if(languages == null || languages.size() == 0){
- nameQueryBuilder.add(descriptionElementQueryFactory.newTermQuery("name", queryString), Occur.MUST);
- } else {
- Builder languageSubQueryBuilder = new Builder();
- for(Language lang : languages){
- languageSubQueryBuilder.add(descriptionElementQueryFactory.newTermQuery("language.uuid", lang.getUuid().toString(), false), Occur.SHOULD);
+ if(!StringUtils.isEmpty(queryString)){
+
+ textQueryBuilder.add(descriptionElementQueryFactory.newTermQuery("titleCache", queryString), Occur.SHOULD);
+
+ // common name
+ Builder nameQueryBuilder = new Builder();
+ if(languages == null || languages.size() == 0){
+ nameQueryBuilder.add(descriptionElementQueryFactory.newTermQuery("name", queryString), Occur.MUST);
+ } else {
+ Builder languageSubQueryBuilder = new Builder();
+ for(Language lang : languages){
+ languageSubQueryBuilder.add(descriptionElementQueryFactory.newTermQuery("language.uuid", lang.getUuid().toString(), false), Occur.SHOULD);
+ }
+ nameQueryBuilder.add(descriptionElementQueryFactory.newTermQuery("name", queryString), Occur.MUST);
+ nameQueryBuilder.add(languageSubQueryBuilder.build(), Occur.MUST);
}
- nameQueryBuilder.add(descriptionElementQueryFactory.newTermQuery("name", queryString), Occur.MUST);
- nameQueryBuilder.add(languageSubQueryBuilder.build(), Occur.MUST);
- }
- textQueryBuilder.add(nameQueryBuilder.build(), Occur.SHOULD);
+ textQueryBuilder.add(nameQueryBuilder.build(), Occur.SHOULD);
- // text field from TextData
- textQueryBuilder.add(descriptionElementQueryFactory.newMultilanguageTextQuery("text", queryString, languages), Occur.SHOULD);
+ // text field from TextData
+ textQueryBuilder.add(descriptionElementQueryFactory.newMultilanguageTextQuery("text", queryString, languages), Occur.SHOULD);
- // --- TermBase fields - by representation ----
- // state field from CategoricalData
- textQueryBuilder.add(descriptionElementQueryFactory.newDefinedTermQuery("stateData.state", queryString, languages), Occur.SHOULD);
+ // --- TermBase fields - by representation ----
+ // state field from CategoricalData
+ textQueryBuilder.add(descriptionElementQueryFactory.newDefinedTermQuery("stateData.state", queryString, languages), Occur.SHOULD);
- // state field from CategoricalData
- textQueryBuilder.add(descriptionElementQueryFactory.newDefinedTermQuery("stateData.modifyingText", queryString, languages), Occur.SHOULD);
+ // state field from CategoricalData
+ textQueryBuilder.add(descriptionElementQueryFactory.newDefinedTermQuery("stateData.modifyingText", queryString, languages), Occur.SHOULD);
- // area field from Distribution
- textQueryBuilder.add(descriptionElementQueryFactory.newDefinedTermQuery("area", queryString, languages), Occur.SHOULD);
+ // area field from Distribution
+ textQueryBuilder.add(descriptionElementQueryFactory.newDefinedTermQuery("area", queryString, languages), Occur.SHOULD);
- // status field from Distribution
- textQueryBuilder.add(descriptionElementQueryFactory.newDefinedTermQuery("status", queryString, languages), Occur.SHOULD);
+ // status field from Distribution
+ textQueryBuilder.add(descriptionElementQueryFactory.newDefinedTermQuery("status", queryString, languages), Occur.SHOULD);
+
+ finalQueryBuilder.add(textQueryBuilder.build(), Occur.MUST);
- finalQueryBuilder.add(textQueryBuilder.build(), Occur.MUST);
+ }
// --- classification ----
+
if(classification != null){
finalQueryBuilder.add(descriptionElementQueryFactory.newEntityIdQuery("inDescription.taxon.taxonNodes.classification.id", classification), Occur.MUST);
}
+ if(subtree != null){
+ finalQueryBuilder.add(descriptionElementQueryFactory.newTermQuery("inDescription.taxon.taxonNodes.treeIndex", subtree.treeIndexWc(), true), Occur.MUST);
+ }
// --- IdentifieableEntity fields - by uuid
if(features != null && features.size() > 0 ){
@Override
public List<TaxonBase> findTaxaByName(MatchingTaxonConfigurator config){
List<TaxonBase> taxonList = dao.getTaxaByName(true, config.isIncludeSynonyms(), false, false, false,
- config.getTaxonNameTitle(), null, MatchMode.EXACT, null, config.isIncludeSynonyms(), null, 0, 0, config.getPropertyPath());
+ config.getTaxonNameTitle(), null, null, MatchMode.EXACT, null, config.isIncludeSynonyms(), null, 0, 0, config.getPropertyPath());
return taxonList;
}
import eu.etaxonomy.cdm.model.location.NamedArea;\r
import eu.etaxonomy.cdm.model.taxon.Classification;\r
import eu.etaxonomy.cdm.model.taxon.TaxonBase;\r
+import eu.etaxonomy.cdm.model.taxon.TaxonNode;\r
import eu.etaxonomy.cdm.persistence.query.MatchMode;\r
import eu.etaxonomy.cdm.persistence.query.NameSearchOrder;\r
\r
private boolean doMisappliedNames = false;\r
private boolean doIncludeAuthors = false;\r
private Classification classification = null;\r
- private List<String> taxonPropertyPath;\r
+ private TaxonNode subtree = null;\r
+ private List<String> taxonPropertyPath;\r
private List<String> synonymPropertyPath;\r
private List<String> taxonNamePropertyPath;\r
private List<String> commonNamePropertyPath;\r
private Set<NamedArea> namedAreas;\r
private NameSearchOrder order;\r
\r
+\r
/**\r
* @return the taxonNamePropertyPath\r
*/\r
this.includeUnpublished = includeUnpublished;\r
}\r
\r
+ /** @return the taxon node whose subtree restricts this search configuration; may be null (no subtree filter) */\r
+ @Override\r
+ public TaxonNode getSubtree() {\r
+ return subtree;\r
+ }\r
+ /** @param subtree the taxon node whose subtree should restrict this search configuration; null disables the filter */\r
+ @Override\r
+ public void setSubtree(TaxonNode subtree) {\r
+ this.subtree = subtree;\r
+ }\r
+\r
}\r
import eu.etaxonomy.cdm.model.location.NamedArea;\r
import eu.etaxonomy.cdm.model.taxon.Classification;\r
import eu.etaxonomy.cdm.model.taxon.TaxonBase;\r
+import eu.etaxonomy.cdm.model.taxon.TaxonNode;\r
import eu.etaxonomy.cdm.persistence.query.NameSearchOrder;\r
\r
/**\r
public Classification getClassification();\r
public void setClassification(Classification classification);\r
\r
+ public TaxonNode getSubtree();\r
+ public void setSubtree(TaxonNode subtree);\r
+\r
public Set<NamedArea> getNamedAreas();\r
public void setNamedAreas(Set<NamedArea> areas);\r
\r
\r
public Integer getPageNumber();\r
public void setPageNumber(Integer pageNumber);\r
- \r
+\r
public List<Criterion> getCriteria();\r
public void setCriteria(List<Criterion> criteria);\r
- \r
+\r
public List<Restriction<?>> getRestrictions();\r
public void setRestrictions(List<Restriction<?>> restrictions);\r
\r
* @author n.hoffmann\r
* @since 03.03.2009\r
*/\r
-public class IdentifiableServiceConfiguratorImpl<T extends IIdentifiableEntity> implements IIdentifiableEntityServiceConfigurator<T>{\r
+public class IdentifiableServiceConfiguratorImpl<T extends IIdentifiableEntity>\r
+ implements IIdentifiableEntityServiceConfigurator<T>{\r
\r
private static final long serialVersionUID = -8126736101861741087L;\r
\r
public class CdmEntityIdentifier implements Serializable {
- /**
- *
- */
private static final long serialVersionUID = 1479948194282284147L;
*/
package eu.etaxonomy.cdm.api.service.dto;
+import eu.etaxonomy.cdm.model.agent.Institution;
import eu.etaxonomy.cdm.model.occurrence.Collection;
/**
* @param institute
* @param townOrLocation
*/
- public CollectionDTO(String code, String codeStandard, String institute, String townOrLocation) {
+ public CollectionDTO(String code, String codeStandard, Institution institute, String townOrLocation) {
this.code = code;
this.codeStandard = codeStandard;
- this.institute = institute;
+ if (institute != null){
+ this.institute = institute.getTitleCache();
+ }
this.townOrLocation = townOrLocation;
}
* @param collection
*/
public CollectionDTO(Collection collection) {
- this(collection.getCode(),collection.getCodeStandard(), collection.getInstitute().getTitleCache(),collection.getTownOrLocation());
+ this(collection.getCode(),collection.getCodeStandard(), collection.getInstitute(),collection.getTownOrLocation());
}
import eu.etaxonomy.cdm.model.location.NamedArea;
import eu.etaxonomy.cdm.model.location.Point;
import eu.etaxonomy.cdm.model.occurrence.GatheringEvent;
-import eu.etaxonomy.cdm.persistence.dto.TermDto;
/**
* @author k.luther
private String locality;
private Point exactLocation;
private String country;
- private Set<TermDto> collectingAreas;
+ private Set<String> collectingAreas;
private String collectingMethod;
private Integer absoluteElevation;
private Integer absoluteElevationMax;
* @param distanceToWaterSurfaceMax
* @param distanceToWaterSurfaceText
*/
- public GatheringEventDTO(String locality, Point exactLocation, String country, Set<TermDto> collectingAreas,
+ public GatheringEventDTO(String locality, Point exactLocation, String country, Set<String> collectingAreas,
String collectingMethod, String collector, Integer absoluteElevation, Integer absoluteElevationMax,
String absoluteElevationText, Double distanceToGround, Double distanceToGroundMax,
String distanceToGroundText, Double distanceToWaterSurface, Double distanceToWaterSurfaceMax,
}
for (NamedArea area: gathering.getCollectingAreas()){
- TermDto areaDto = TermDto.fromNamedArea(area);
+ String areaString = area.getLabel();
if (dto.getCollectingAreas() == null){
dto.collectingAreas = new HashSet<>();
}
- dto.collectingAreas.add(areaDto);
+ dto.collectingAreas.add(areaString);
}
return dto;
public String getCountry() {
return country;
}
- public Set<TermDto> getCollectingAreas() {
+ public Set<String> getCollectingAreas() {
return collectingAreas;
}
public String getCollectingMethod() {
*/
public PreservedSpecimenDTO(DerivedUnit derivedUnit) {
super(derivedUnit);
- }
-
- public static PreservedSpecimenDTO newInstance(DerivedUnit derivedUnit){
- PreservedSpecimenDTO newInstance = new PreservedSpecimenDTO(derivedUnit);
-
-// newInstance.setTitleCache(derivedUnit.getTitleCache());
-
- newInstance.accessionNumber = derivedUnit.getAccessionNumber();
- newInstance.preferredStableUri = derivedUnit.getPreferredStableUri();
+ accessionNumber = derivedUnit.getAccessionNumber();
+ preferredStableUri = derivedUnit.getPreferredStableUri();
if (derivedUnit.getCollection() != null){
- newInstance.setCollectioDTo(new CollectionDTO(HibernateProxyHelper.deproxy(derivedUnit.getCollection())));
+ setCollectioDTo(new CollectionDTO(HibernateProxyHelper.deproxy(derivedUnit.getCollection())));
}
- newInstance.setBarcode(derivedUnit.getBarcode());
- newInstance.setCatalogNumber(derivedUnit.getCatalogNumber());
- newInstance.listLabel = derivedUnit.getCatalogNumber();
- newInstance.setCollectorsNumber(derivedUnit.getCollectorsNumber());
+ setBarcode(derivedUnit.getBarcode());
+ setCatalogNumber(derivedUnit.getCatalogNumber());
+ listLabel = derivedUnit.getCatalogNumber();
+ setCollectorsNumber(derivedUnit.getCollectorsNumber());
if (derivedUnit.getDerivedFrom() != null){
- newInstance.setDerivationEvent(new DerivationEventDTO(derivedUnit.getDerivedFrom() ));
+ setDerivationEvent(new DerivationEventDTO(derivedUnit.getDerivedFrom() ));
}
if (derivedUnit.getPreservation()!= null){
- newInstance.setPreservationMethod(derivedUnit.getPreservation().getMaterialMethodText());
+ setPreservationMethod(derivedUnit.getPreservation().getMaterialMethodText());
}
- newInstance.setRecordBase(derivedUnit.getRecordBasis().getMessage());
- newInstance.setSources(derivedUnit.getSources());
- newInstance.setSpecimenTypeDesignations(derivedUnit.getSpecimenTypeDesignations());
-
- return newInstance;
+ setRecordBase(derivedUnit.getRecordBasis().getMessage());
+ setSources(derivedUnit.getSources());
+ setSpecimenTypeDesignations(derivedUnit.getSpecimenTypeDesignations());
}
+
+
public String getAccessionNumber() {
return accessionNumber;
}
import java.util.HashSet;
import java.util.Set;
-import eu.etaxonomy.cdm.model.common.DefinedTerm;
import eu.etaxonomy.cdm.model.media.Media;
import eu.etaxonomy.cdm.model.molecular.Sequence;
import eu.etaxonomy.cdm.model.molecular.SequenceString;
private Media contigFile;
- private SequenceString consensusSequence = SequenceString.NewInstance();
+ private SequenceString consensusSequence;
private Boolean isBarcode = null;
private Set<SingleReadAlignment> singleReadAlignments = new HashSet<SingleReadAlignment>();
- private DefinedTerm dnaMarker;
+ private String dnaMarker;
geneticAccessionNumber = seq.getGeneticAccessionNumber();
boldProcessId = seq.getBoldProcessId();
singleReadAlignments = seq.getSingleReadAlignments();
- dnaMarker = seq.getDnaMarker();
+ dnaMarker = seq.getDnaMarker().getLabel();
haplotype = seq.getHaplotype();
citations = seq.getCitations();
try{
/**
* @return the dnaMarker
*/
- public DefinedTerm getDnaMarker() {
+ public String getDnaMarker() {
return dnaMarker;
}
--- /dev/null
+/**
+* Copyright (C) 2018 EDIT
+* European Distributed Institute of Taxonomy
+* http://www.e-taxonomy.eu
+*
+* The contents of this file are subject to the Mozilla Public License Version 1.1
+* See LICENSE.TXT at the top of this package for the full license terms.
+*/
+package eu.etaxonomy.cdm.api.service.dto;
+
+import java.util.UUID;
+
+/**
+ * @author a.kohlbecker
+ * @since Aug 31, 2018
+ *
+ */
+public class SourceDTO {
+
+ private UUID uuid;
+ String citation;
+ String citationDetail;
+
+}
private static final String SENSU_SEPARATOR = ", ";
- public class TaxonRelation{
+ public class TaxonRelationDTO{
- private UUID relationUuid;
private boolean doubtful = false;
private boolean misapplication = false;
private boolean synonym = false;
//TODO maybe this will be changed in future
private TermDto type;
private UUID typeUuid;
+ private SourceDTO sec;
+ private SourceDTO relSec;
- public TaxonRelation(TaxonRelationship relation, Direction direction, List<Language> languages) {
+ public TaxonRelationDTO(TaxonRelationship relation, Direction direction, List<Language> languages) {
Taxon relatedTaxon = direction == Direction.relatedTo? relation.getToTaxon()
: relation.getFromTaxon();
this.taxonUuid = relatedTaxon.getUuid();
this.doubtful = relation.isDoubtful();
- this.relationUuid = relation.getUuid();
this.direction = direction;
TaxonRelationshipType relType = relation.getType();
public void setDoubtful(boolean doubtful) {
this.doubtful = doubtful;
}
- public UUID getRelationUuid() {
- return relationUuid;
- }
- public void setRelationUuid(UUID relationUuid) {
- this.relationUuid = relationUuid;
- }
public Direction getDirection() {
return direction;
}
- private List<TaxonRelation> relations = new ArrayList<>();
+ private List<TaxonRelationDTO> relations = new ArrayList<>();
private List<List<TaggedText>> misapplications = new ArrayList<>();
// ************************** GETTER / SETTER ***********************/
- public List<TaxonRelation> getRelations() {
+ public List<TaxonRelationDTO> getRelations() {
return relations;
}
- public void setIncludedTaxa(List<TaxonRelation> relations) {
+ public void setIncludedTaxa(List<TaxonRelationDTO> relations) {
this.relations = relations;
}
- public void addRelation(TaxonRelation relation){
+ public void addRelation(TaxonRelationDTO relation){
relations.add(relation);
}
* @param relation
* @param direction
*/
- public TaxonRelation addRelation(TaxonRelationship relation, Direction direction, List<Language> languages) {
- TaxonRelation newRelation = new TaxonRelation(relation, direction, languages);
+ public TaxonRelationDTO addRelation(TaxonRelationship relation, Direction direction, List<Language> languages) {
+ TaxonRelationDTO newRelation = new TaxonRelationDTO(relation, direction, languages);
relations.add(newRelation);
return newRelation;
}
public void createMisapplicationString() {
List<List<TaggedText>> result = new ArrayList<>();
- for (TaxonRelation relation: relations){
+ for (TaxonRelationDTO relation: relations){
if (relation.isMisapplication()){
List<TaggedText> tags = relation.getTaggedText();
}
//
// public boolean contains(UUID taxonUuid) {
-// for (TaxonRelation relation: relations){
+// for (TaxonRelationDTO relation: relations){
// if (taxon.taxonUuid.equals(taxonUuid)){
// return true;
// }
@Override
public String toString(){
String result = "";
- for (TaxonRelation relation : relations){
+ for (TaxonRelationDTO relation : relations){
result += relation.toString() + ",";
}
if (result.length() > 0){
import eu.etaxonomy.cdm.model.name.TypeDesignationBase;
/**
- * @author pplitzner
- * @since May 3, 2018
+ * @author k.luther
+ * @since 06.09.2018
*
*/
public class TypeDesignationComparator implements Comparator<TypeDesignationBase> {
+
private TypeDesignationStatusComparator statusComparator = new TypeDesignationStatusComparator();
@SuppressWarnings("unchecked")
@Override
public int compare(TypeDesignationBase o1, TypeDesignationBase o2) {
+
if(o1==null){
return 1;
}
if(o1.getTypeStatus()==null){
return 1;
}
- if(o2.getTypeStatus()==null){
+ if(o2.getTypeStatus()==null){
return-1;
}
return statusComparator.compare(o1.getTypeStatus(), o2.getTypeStatus()) ;
}
-}
+}
\ No newline at end of file
package eu.etaxonomy.cdm.api.service.name;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
+import java.util.UUID;
import org.apache.commons.lang3.StringUtils;
import org.hibernate.search.hcore.util.impl.HibernateHelper;
import eu.etaxonomy.cdm.model.common.IdentifiableSource;
import eu.etaxonomy.cdm.model.common.TermVocabulary;
import eu.etaxonomy.cdm.model.common.VersionableEntity;
+import eu.etaxonomy.cdm.model.name.HomotypicalGroup;
import eu.etaxonomy.cdm.model.name.NameTypeDesignation;
import eu.etaxonomy.cdm.model.name.SpecimenTypeDesignation;
import eu.etaxonomy.cdm.model.name.TaxonName;
import eu.etaxonomy.cdm.model.occurrence.FieldUnit;
import eu.etaxonomy.cdm.model.occurrence.MediaSpecimen;
import eu.etaxonomy.cdm.model.occurrence.SpecimenOrObservationBase;
+import eu.etaxonomy.cdm.model.reference.Reference;
import eu.etaxonomy.cdm.ref.EntityReference;
import eu.etaxonomy.cdm.ref.TypedEntityReference;
import eu.etaxonomy.cdm.strategy.cache.TagEnum;
import eu.etaxonomy.cdm.strategy.cache.TaggedText;
import eu.etaxonomy.cdm.strategy.cache.TaggedTextBuilder;
+import eu.etaxonomy.cdm.strategy.cache.reference.DefaultReferenceCacheStrategy;
/**
* Manages a collection of {@link TypeDesignationBase TypeDesignations} for the same typified name.
*
private static final String TYPE_SEPARATOR = "; ";
private static final String TYPE_DESIGNATION_SEPARATOR = ", ";
-
- private Collection<TypeDesignationBase> typeDesignations;
+ private static final String TYPE_STATUS_SEPARATOR_WITHCITATION = ": ";
+ private static final String TYPE_STATUS_PARENTHESIS_LEFT = " (";
+ private static final String TYPE_STATUS_PARENTHESIS_RIGHT = ")";
+ private static final String REFERENCE_PARENTHESIS_RIGHT = "]";
+ private static final String REFERENCE_PARENTHESIS_LEFT = " [";
+ private static final String REFERENCE_FIDE = "fide ";
+ private Map<UUID,TypeDesignationBase> typeDesignations;
private NameTypeBaseEntityType nameTypeBaseEntityType = NameTypeBaseEntityType.NAME_TYPE_DESIGNATION;
private List<String> problems = new ArrayList<>();
private boolean printCitation = false;
+ private boolean useShortCitation = false;
private List<TaggedText> taggedText;
*
*/
public TypeDesignationSetManager(Collection<TypeDesignationBase> typeDesignations) throws RegistrationValidationException {
- this.typeDesignations = typeDesignations;
+ if (this.typeDesignations == null){
+ this.typeDesignations = new HashMap<>();
+ }
+ for (TypeDesignationBase typeDes:typeDesignations){
+ this.typeDesignations.put(typeDes.getUuid(), typeDes);
+ }
findTypifiedName();
mapAndSort();
}
+ /**
+ * @param containgEntity
+ * @param taxonName
+ * @throws RegistrationValidationException
+ *
+ */
+ public TypeDesignationSetManager(HomotypicalGroup group) throws RegistrationValidationException {
+ if (this.typeDesignations == null){
+ this.typeDesignations = new HashMap<>();
+ }
+ for (TypeDesignationBase typeDes:group.getTypeDesignations()){
+ this.typeDesignations.put(typeDes.getUuid(), typeDes);
+ }
+ //findTypifiedName();
+ mapAndSort();
+ }
+
/**
* @param typifiedName2
*/
public TypeDesignationSetManager(TaxonName typifiedName) {
- this.typeDesignations = new ArrayList<>();
+ this.typeDesignations = new HashMap<>();
this.typifiedNameRef = new EntityReference(typifiedName.getUuid(), typifiedName.getTitleCache());
}
* @param typeDesignations
*/
public void addTypeDesigations(CdmBase containgEntity, TypeDesignationBase ... typeDesignations){
- this.typeDesignations.addAll(Arrays.asList(typeDesignations));
+ for (TypeDesignationBase typeDes: typeDesignations){
+ this.typeDesignations.put(typeDes.getUuid(), typeDes);
+ }
mapAndSort();
}
protected void mapAndSort() {
finalString = null;
Map<TypedEntityReference, TypeDesignationWorkingSet> byBaseEntityByTypeStatus = new HashMap<>();
- this.typeDesignations.forEach(td -> mapTypeDesignation(byBaseEntityByTypeStatus, td));
+
+ this.typeDesignations.values().forEach(td -> mapTypeDesignation(byBaseEntityByTypeStatus, td));
orderedByTypesByBaseEntity = orderByTypeByBaseEntity(byBaseEntityByTypeStatus);
}
td.getUuid(),
stringify(td));
- TypeDesignationWorkingSet typedesignationWorkingSet;
if(!byBaseEntityByTypeStatus.containsKey(baseEntityReference)){
byBaseEntityByTypeStatus.put(baseEntityReference, new TypeDesignationWorkingSet(baseEntity, baseEntityReference));
}
+ byBaseEntityByTypeStatus.get(baseEntityReference).insert(status, typeDesignationEntityReference);
- typedesignationWorkingSet = byBaseEntityByTypeStatus.get(baseEntityReference);
- typedesignationWorkingSet.insert(status, typeDesignationEntityReference);
} catch (DataIntegrityException e){
problems.add(e.getMessage());
}
workingsetBuilder.add(TagEnum.separator, TYPE_SEPARATOR);
}
boolean isNameTypeDesignation = false;
- if(SpecimenOrObservationBase.class.isAssignableFrom(baseEntityRef.getType())){
+ if(SpecimenOrObservationBase.class.isAssignableFrom(baseEntityRef.getType()) ){
workingsetBuilder.add(TagEnum.label, "Type:");
- } else {
+ } else{
workingsetBuilder.add(TagEnum.label, "NameType:");
isNameTypeDesignation = true;
}
for(TypeDesignationStatusBase<?> typeStatus : typeDesignationWorkingSet.keySet()) {
if(typeStatusCount++ > 0){
workingsetBuilder.add(TagEnum.separator, TYPE_STATUS_SEPARATOR);
+
}
boolean isPlural = typeDesignationWorkingSet.get(typeStatus).size() > 1;
if(!typeStatus.equals(NULL_STATUS)) {
+
workingsetBuilder.add(TagEnum.label, typeStatus.getLabel() + (isPlural ? "s:" : ","));
+ }
+
+
+ int typeDesignationCount = 0;
+ for(TypedEntityReference typeDesignationEntityReference : createSortedList(typeDesignationWorkingSet, typeStatus)) {
+
+ if(typeDesignationCount++ > 0){
+ workingsetBuilder.add(TagEnum.separator, TYPE_DESIGNATION_SEPARATOR);
+ }
+
+ workingsetBuilder.add(TagEnum.typeDesignation, typeDesignationEntityReference.getLabel(), typeDesignationEntityReference);
+
+ }
+
+ }
+ typeDesignationWorkingSet.setRepresentation(workingsetBuilder.toString());
+ finalString += typeDesignationWorkingSet.getRepresentation();
+ finalBuilder.addAll(workingsetBuilder);
+ }
+ }
+ finalString = finalString.trim();
+ taggedText = finalBuilder.getTaggedText();
+ }
+ }
+
+ public void buildStringWithCitation(){
+
+ if(finalString == null){
+
+ TaggedTextBuilder finalBuilder = new TaggedTextBuilder();
+ finalString = "";
+
+ if(getTypifiedNameCache() != null){
+ finalString += getTypifiedNameCache() + " ";
+ finalBuilder.add(TagEnum.name, getTypifiedNameCache(), new TypedEntityReference<>(TaxonName.class, getTypifiedNameRef().getUuid()));
+ }
+
+ int typeCount = 0;
+ if(orderedByTypesByBaseEntity != null){
+ for(TypedEntityReference baseEntityRef : orderedByTypesByBaseEntity.keySet()) {
+
+ TaggedTextBuilder workingsetBuilder = new TaggedTextBuilder();
+ if(typeCount++ > 0){
+ workingsetBuilder.add(TagEnum.separator, TYPE_SEPARATOR);
+ }
+ boolean isNameTypeDesignation = false;
+
+ if(!baseEntityRef.getLabel().isEmpty()){
+ workingsetBuilder.add(TagEnum.specimenOrObservation, baseEntityRef.getLabel(), baseEntityRef);
+ }
+ TypeDesignationWorkingSet typeDesignationWorkingSet = orderedByTypesByBaseEntity.get(baseEntityRef);
+ int typeStatusCount = 0;
+ for(TypeDesignationStatusBase<?> typeStatus : typeDesignationWorkingSet.keySet()) {
+ if(typeStatusCount++ > 0){
+ workingsetBuilder.add(TagEnum.separator, TYPE_STATUS_SEPARATOR);
+
}
+ boolean isPlural = typeDesignationWorkingSet.get(typeStatus).size() > 1;
+ if(!typeStatus.equals(NULL_STATUS)) {
+ workingsetBuilder.add(TagEnum.separator, TYPE_STATUS_PARENTHESIS_LEFT);
+ workingsetBuilder.add(TagEnum.label, typeStatus.getLabel() + (isPlural ? "s:" : ":"));
+ }
int typeDesignationCount = 0;
- for(TypedEntityReference typeDesignationEntityReference : typeDesignationWorkingSet.get(typeStatus)) {
+ for(TypedEntityReference typeDesignationEntityReference : createSortedList(typeDesignationWorkingSet, typeStatus)) {
if(typeDesignationCount++ > 0){
- workingsetBuilder.add(TagEnum.separator, TYPE_DESIGNATION_SEPARATOR);
+ workingsetBuilder.add(TagEnum.separator, TYPE_DESIGNATION_SEPARATOR);
}
+
workingsetBuilder.add(TagEnum.typeDesignation, typeDesignationEntityReference.getLabel(), typeDesignationEntityReference);
+
+ TypeDesignationBase typeDes = typeDesignations.get(typeDesignationEntityReference.getUuid());
+ if (typeDes.getCitation() != null){
+ // workingsetBuilder.add(TagEnum.separator, REFERENCE_PARENTHESIS_LEFT);
+ String shortCitation = ((DefaultReferenceCacheStrategy)typeDes.getCitation().getCacheStrategy()).createShortCitation(typeDes.getCitation());
+ workingsetBuilder.add(TagEnum.reference, shortCitation, typeDesignationEntityReference);
+ //workingsetBuilder.add(TagEnum.separator, REFERENCE_PARENTHESIS_RIGHT);
+ }
+
+ if ((!typeStatus.equals(NULL_STATUS)) &&(typeDesignationCount == typeDesignationWorkingSet.get(typeStatus).size())){
+ workingsetBuilder.add(TagEnum.separator, TYPE_STATUS_PARENTHESIS_RIGHT);
+ }
}
+
}
typeDesignationWorkingSet.setRepresentation(workingsetBuilder.toString());
finalString += typeDesignationWorkingSet.getRepresentation();
}
}
+ /**
+ * @param typeDesignationWorkingSet
+ * @param typeStatus
+ * @return
+ */
+ private List<TypedEntityReference<TypeDesignationBase>> createSortedList(
+ TypeDesignationWorkingSet typeDesignationWorkingSet, TypeDesignationStatusBase<?> typeStatus) {
+ List<TypedEntityReference<TypeDesignationBase>> typeDesignationEntityReferences = new ArrayList<>(typeDesignationWorkingSet.get(typeStatus));
+ Collections.sort(typeDesignationEntityReferences, new TypedEntityComparator());
+ return typeDesignationEntityReferences;
+ }
+
+
/**
* FIXME use the validation framework validators to store the validation problems!!!
*
TaxonName typifiedName = null;
- for(TypeDesignationBase<?> typeDesignation : typeDesignations){
+ for(TypeDesignationBase<?> typeDesignation : typeDesignations.values()){
typeDesignation.getTypifiedNames();
if(typeDesignation.getTypifiedNames().isEmpty()){
* @return
*/
public Collection<TypeDesignationBase> getTypeDesignations() {
- return typeDesignations;
+ return typeDesignations.values();
}
/**
* @return
*/
public TypeDesignationBase findTypeDesignation(EntityReference typeDesignationRef) {
- for(TypeDesignationBase td : typeDesignations){
- if(td.getUuid().equals(typeDesignationRef.getUuid())){
- return td;
- }
- }
- // TODO Auto-generated method stub
- return null;
+ return this.typeDesignations.get(typeDesignationRef.getUuid());
}
if(msp.getMediaSpecimen() != null){
for(IdentifiableSource source : msp.getMediaSpecimen().getSources()){
String refDetailStr = source.getCitationMicroReference();
- String referenceStr = source.getCitation().getTitleCache();
+ String referenceStr = source.getCitation() == null? "": source.getCitation().getTitleCache();
if(StringUtils.isNotBlank(source.getCitationMicroReference())){
typeSpecimenTitle += refDetailStr;
}
} else {
DerivedUnitFacadeCacheStrategy cacheStrategy = new DerivedUnitFacadeCacheStrategy();
typeSpecimenTitle += cacheStrategy.getTitleCache(du, true);
+
}
result += (isMediaSpecimen ? "[icon] " : "") + typeSpecimenTitle.trim();
}
if(isPrintCitation() && td.getCitation() != null){
- if(td.getCitation().getAbbrevTitle() != null){
- result += " " + td.getCitation().getAbbrevTitle();
+ Reference citation = HibernateProxyHelper.deproxy(td.getCitation(), Reference.class);
+ if(citation.getAbbrevTitle() != null){
+
+ result += " " + citation.getAbbrevTitle();
} else {
- result += " " + td.getCitation().getTitleCache();
+ result += " " + citation.getTitleCache();
}
if(td.getCitationMicroReference() != null){
result += " :" + td.getCitationMicroReference();
return taggedText;
}
+ public List<TaggedText> toTaggedTextWithCitation() {
+ buildStringWithCitation();
+ return taggedText;
+ }
+
+
/**
* @return the printCitation
*/
return nameTypeBaseEntityType;
}
+ public boolean isUseShortCitation() {
+ return useShortCitation;
+ }
+
+ public void setUseShortCitation(boolean useShortCitation) {
+ this.useShortCitation = useShortCitation;
+ }
+
/**
* TypeDesignations which refer to the same FieldUnit (SpecimenTypeDesignation) or TaxonName
* (NameTypeDesignation) form a working set. The <code>TypeDesignationWorkingSet</code> internally
return typeDesignations;
}
+
+
/**
* @param status
* @param typeDesignationEntityReference
--- /dev/null
+/**
+* Copyright (C) 2018 EDIT
+* European Distributed Institute of Taxonomy
+* http://www.e-taxonomy.eu
+*
+* The contents of this file are subject to the Mozilla Public License Version 1.1
+* See LICENSE.TXT at the top of this package for the full license terms.
+*/
+package eu.etaxonomy.cdm.api.service.name;
+
+import java.util.Comparator;
+
+import eu.etaxonomy.cdm.model.name.TypeDesignationBase;
+import eu.etaxonomy.cdm.ref.TypedEntityReference;
+
+/**
+ * @author pplitzner
+ * @since May 3, 2018
+ *
+ */
+public class TypedEntityComparator implements Comparator<TypedEntityReference<TypeDesignationBase> >{
+
+
+
+ @Override
+ @SuppressWarnings("unchecked")
+ public int compare(TypedEntityReference<TypeDesignationBase> o1, TypedEntityReference<TypeDesignationBase> o2) {
+ if(o1==null){
+ return 1;
+ }
+ if(o2==null){
+ return -1;
+ }
+ if(o1.getUuid()==null){
+ return 1;
+ }
+ if(o2.getUuid()==null){
+ return -1;
+ }
+
+ return o1.getLabel().compareTo(o2.getLabel());
+ }
+}
if(!queryParsers.containsKey(clazz)){
Analyzer analyzer = getAnalyzerFor(clazz);
QueryParser parser = new QueryParser(DEFAULT_QURERY_FIELD_NAME, analyzer);
+ parser.setAllowLeadingWildcard(true);
queryParsers.put(clazz, parser);
}
return queryParsers.get(clazz);
if(!complexPhraseQueryParsers.containsKey(clazz)){
Analyzer analyzer = getAnalyzerFor(clazz);
QueryParser parser = new ComplexPhraseQueryParser(DEFAULT_QURERY_FIELD_NAME, analyzer);
+ parser.setAllowLeadingWildcard(true);
complexPhraseQueryParsers.put(clazz, parser);
}
return complexPhraseQueryParsers.get(clazz);
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
-import org.apache.lucene.search.WildcardQuery;
import org.apache.lucene.search.join.JoinUtil;
import org.apache.lucene.search.join.ScoreMode;
import org.hibernate.search.engine.ProjectionConstants;
-import org.hibernate.search.spatial.impl.Point;
import org.hibernate.search.spatial.impl.Rectangle;
-import eu.etaxonomy.cdm.hibernate.search.DefinedTermBaseClassBridge;
-import eu.etaxonomy.cdm.hibernate.search.MultilanguageTextFieldBridge;
import eu.etaxonomy.cdm.hibernate.search.NotNullAwareIdBridge;
import eu.etaxonomy.cdm.model.common.CdmBase;
import eu.etaxonomy.cdm.model.common.IdentifiableEntity;
* Creates a new Term query. Depending on whether <code>isTextField</code> is set true or not the
* supplied <code>queryString</code> will be parsed by using the according analyzer or not.
* Setting <code>isTextField</code> to <code>false</code> is useful for searching for uuids etc.
+ * <p>
+ * The appropriate query type is determined by the query string:
+ * <ul>
+ * <li>Lactuca ==> TermQuery.class </li>
+ * <li>Lactuca perennis ==> BooleanQuery.class </li>
+ * <li>Lactu* ==> PrefixQuery.class</li>
+ * <li>"Lactuca perennis" ==> PhraseQuery.class</li>
+ * </ul>
+ *
*
* @param fieldName
* @param queryString
String luceneQueryString = fieldName + ":(" + queryString + ")";
if (isTextField) {
queryString = queryString.trim();
- boolean isComplexPhraseQuery = queryString.matches("^\\\".*\\s+.*[\\*].*\\\"$");
+ // ^\"(.*\s+.*[\*].*|.*[\*].*\s+.*)\"$ matches phrase query strings with wildcards like '"Lactuca per*"'
+ boolean isComplexPhraseQuery = queryString.matches("^\\\"(.*\\s+.*[\\*].*|.*[\\*].*\\s+.*)\\\"$");
textFieldNames.add(fieldName);
- // in order to support the full query syntax we must use the parser
- // here
+ // in order to support the full query syntax we must use the parser here
try {
- return toolProvider.getQueryParserFor(cdmBaseType, isComplexPhraseQuery).parse(luceneQueryString);
+ Query termQuery = toolProvider.getQueryParserFor(cdmBaseType, isComplexPhraseQuery).parse(luceneQueryString);
+ return termQuery;
} catch (ParseException e) {
logger.error(e);
}
*/
public class AbstractRelationshipEdge<T extends RelationshipTermBase> {
- private Set<T> taxonRelationshipTypes;
+ private Set<T> relationshipTypes;
private EnumSet<Direction> directions;
- public AbstractRelationshipEdge(T taxonRelationshipType, Direction ... direction) {
+ public AbstractRelationshipEdge(T relationshipType, Direction ... direction) {
super();
- this.taxonRelationshipTypes = new HashSet<>();
- this.taxonRelationshipTypes.add(taxonRelationshipType);
+ this.relationshipTypes = new HashSet<>();
+ this.relationshipTypes.add(relationshipType);
directions = EnumSet.copyOf(Arrays.asList(direction));
}
- public AbstractRelationshipEdge(Set<T> taxonRelationshipTypes, Direction ... direction) {
+ public AbstractRelationshipEdge(Set<T> relationshipTypes, Direction ... direction) {
super();
- this.taxonRelationshipTypes = taxonRelationshipTypes;
+ this.relationshipTypes = relationshipTypes;
directions = EnumSet.copyOf(Arrays.asList(direction));
}
- public Set<T> getTaxonRelationshipTypes() {
- return taxonRelationshipTypes;
+ public Set<T> getRelationshipTypes() {
+ return relationshipTypes;
}
- public void setTaxonRelationshipTypes(Set<T> taxonRelationshipTypes) {
- this.taxonRelationshipTypes = taxonRelationshipTypes;
+ public void setRelationshipTypes(Set<T> relationshipTypes) {
+ this.relationshipTypes = relationshipTypes;
}
public EnumSet<Direction> getDirections() {
}
- /**
- * Test method for {@link eu.etaxonomy.cdm.api.service.ClassificationServiceImpl#loadRankSpecificRootNodes(eu.etaxonomy.cdm.model.taxon.Classification, eu.etaxonomy.cdm.model.name.Rank, java.util.List)}.
- */
@Test
@DataSet
public final void testListRankSpecificRootNodes(){
//
// for more historic Acacia taxonomy see http://lexikon.freenet.de/Akazien
- List<TaxonNode> taxonNodes = service.listRankSpecificRootNodes(null, null, includeUnpublished, null, null, NODE_INIT_STRATEGY);
+ List<TaxonNode> taxonNodes = service.listRankSpecificRootNodes(null, null, null, includeUnpublished, null, null, NODE_INIT_STRATEGY);
Assert.assertEquals(2, taxonNodes.size());
- taxonNodes = service.listRankSpecificRootNodes(classification, null, includeUnpublished, null, null, NODE_INIT_STRATEGY);
+ taxonNodes = service.listRankSpecificRootNodes(classification, null, null, includeUnpublished, null, null, NODE_INIT_STRATEGY);
Assert.assertEquals(2, taxonNodes.size());
- taxonNodes = service.listRankSpecificRootNodes(classification, Rank.SECTION_BOTANY(), includeUnpublished, null, null, NODE_INIT_STRATEGY);
+ taxonNodes = service.listRankSpecificRootNodes(classification, null, Rank.SECTION_BOTANY(), includeUnpublished, null, null, NODE_INIT_STRATEGY);
Assert.assertEquals(4, taxonNodes.size());
// also test if the pager works
- taxonNodes = service.listRankSpecificRootNodes(classification, Rank.SECTION_BOTANY(), includeUnpublished, 10, 0, NODE_INIT_STRATEGY);
+ taxonNodes = service.listRankSpecificRootNodes(classification, null, Rank.SECTION_BOTANY(), includeUnpublished, 10, 0, NODE_INIT_STRATEGY);
Assert.assertEquals(4, taxonNodes.size());
- taxonNodes = service.listRankSpecificRootNodes(classification, Rank.SECTION_BOTANY(), includeUnpublished, 2, 0, NODE_INIT_STRATEGY);
+ taxonNodes = service.listRankSpecificRootNodes(classification, null, Rank.SECTION_BOTANY(), includeUnpublished, 2, 0, NODE_INIT_STRATEGY);
Assert.assertEquals(2, taxonNodes.size());
- taxonNodes = service.listRankSpecificRootNodes(classification, Rank.SECTION_BOTANY(), includeUnpublished, 2, 1, NODE_INIT_STRATEGY);
+ taxonNodes = service.listRankSpecificRootNodes(classification, null, Rank.SECTION_BOTANY(), includeUnpublished, 2, 1, NODE_INIT_STRATEGY);
Assert.assertEquals(2, taxonNodes.size());
- taxonNodes = service.listRankSpecificRootNodes(classification, Rank.SECTION_BOTANY(), includeUnpublished, 2, 2, NODE_INIT_STRATEGY);
+ taxonNodes = service.listRankSpecificRootNodes(classification, null, Rank.SECTION_BOTANY(), includeUnpublished, 2, 2, NODE_INIT_STRATEGY);
Assert.assertEquals(0, taxonNodes.size());
- taxonNodes = service.listRankSpecificRootNodes(classification, Rank.SPECIES(), includeUnpublished, null, null, NODE_INIT_STRATEGY);
+ taxonNodes = service.listRankSpecificRootNodes(classification, null, Rank.SPECIES(), includeUnpublished, null, null, NODE_INIT_STRATEGY);
Assert.assertEquals(3, taxonNodes.size());
// also test if the pager works
- taxonNodes = service.listRankSpecificRootNodes(classification, Rank.SPECIES(), includeUnpublished, 10, 0, NODE_INIT_STRATEGY);
+ taxonNodes = service.listRankSpecificRootNodes(classification, null, Rank.SPECIES(), includeUnpublished, 10, 0, NODE_INIT_STRATEGY);
Assert.assertEquals(3, taxonNodes.size());
- taxonNodes = service.listRankSpecificRootNodes(classification, Rank.SPECIES(), includeUnpublished, 2, 1, NODE_INIT_STRATEGY);
+ taxonNodes = service.listRankSpecificRootNodes(classification, null, Rank.SPECIES(), includeUnpublished, 2, 1, NODE_INIT_STRATEGY);
Assert.assertEquals(1, taxonNodes.size());
import java.util.Set;
import java.util.UUID;
-import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.junit.Assert;
import org.junit.Ignore;
List<Restriction<?>> restrictions;
Pager<TaxonName> result;
- Logger.getLogger("org.hibernate.SQL").setLevel(Level.TRACE);
+ //Logger.getLogger("org.hibernate.SQL").setLevel(Level.TRACE);
restrictions = Arrays.asList(new Restriction<String>("typeDesignations.typeName.titleCache", Operator.AND, null, "Name1"));
result = nameService.findByTitleWithRestrictions(null, "Name3", MatchMode.EXACT, restrictions, null, null, null, null);
private static final UUID ABIES_ALBA_UUID = UUID.fromString("7dbd5810-a3e5-44b6-b563-25152b8867f4");\r
private static final UUID CLASSIFICATION_UUID = UUID.fromString("2a5ceebb-4830-4524-b330-78461bf8cb6b");\r
private static final UUID CLASSIFICATION_ALT_UUID = UUID.fromString("d7c741e3-ae9e-4a7d-a566-9e3a7a0b51ce");\r
- private static final UUID D_ABIES_BALSAMEA_UUID = UUID.fromString("900108d8-e6ce-495e-b32e-7aad3099135e");\r
- private static final UUID D_ABIES_ALBA_UUID = UUID.fromString("ec8bba03-d993-4c85-8472-18b14942464b");\r
- private static final UUID D_ABIES_KAWAKAMII_SEC_KOMAROV_UUID = UUID.fromString("e9d8c2fd-6409-46d5-9c2e-14a2bbb1b2b1");\r
private static final UUID ABIES_SUBALPINA_UUID = UUID.fromString("9fee273c-c819-4f1f-913a-cd910465df51");\r
private static final UUID ABIES_LASIOCARPA_UUID = UUID.fromString("9ce1fecf-c1ad-4127-be01-85d5d9f847ce");\r
\r
+ private static final UUID ROOTNODE_CLASSIFICATION_5000 = UUID.fromString("a8266e45-091f-432f-87ae-c625e6aa9bbc");\r
+\r
+ private static final UUID DESC_ABIES_BALSAMEA_UUID = UUID.fromString("900108d8-e6ce-495e-b32e-7aad3099135e");\r
+ private static final UUID DESC_ABIES_ALBA_UUID = UUID.fromString("ec8bba03-d993-4c85-8472-18b14942464b");\r
+ private static final UUID DESC_ABIES_KAWAKAMII_SEC_KOMAROV_UUID = UUID.fromString("e9d8c2fd-6409-46d5-9c2e-14a2bbb1b2b1");\r
+\r
+\r
private static final int NUM_OF_NEW_RADOM_ENTITIES = 1000;\r
\r
private boolean includeUnpublished = true;\r
@SpringBeanByType\r
private INameService nameService;\r
@SpringBeanByType\r
- private ICdmMassIndexer indexer;\r
+ private ITaxonNodeService nodeService;\r
\r
@SpringBeanByType\r
- private ITaxonNodeService nodeService;\r
+ private ICdmMassIndexer indexer;\r
+\r
\r
private static final int BENCHMARK_ROUNDS = 300;\r
\r
public final void testPurgeAndReindex() throws IOException, LuceneParseException {\r
\r
refreshLuceneIndex();\r
+ TaxonNode subtree = null;\r
\r
- Pager<SearchResult<TaxonBase>> pager = taxonService.findByFullText(null, "Abies", null, includeUnpublished,\r
- null, true, null, null, null, null); // --> 8\r
+ Pager<SearchResult<TaxonBase>> pager = taxonService.findByFullText(null, "Abies", null, subtree,\r
+ includeUnpublished, null, true, null, null, null, null); // --> 8\r
Assert.assertEquals("Expecting 8 entities", 8, pager.getCount().intValue());\r
\r
indexer.purge(null);\r
commitAndStartNewTransaction(null);\r
\r
- pager = taxonService.findByFullText(null, "Abies", null, includeUnpublished, null, true, null, null, null, null); // --> 0\r
+ pager = taxonService.findByFullText(null, "Abies", null, subtree, includeUnpublished, null, true, null, null, null, null); // --> 0\r
Assert.assertEquals("Expecting no entities since the index has been purged", 0, pager.getCount().intValue());\r
\r
indexer.reindex(indexer.indexedClasses(), null);\r
commitAndStartNewTransaction(null);\r
\r
- pager = taxonService.findByFullText(null, "Abies", null, includeUnpublished, null, true, null, null, null, null); // --> 8\r
+ pager = taxonService.findByFullText(null, "Abies", null, subtree, includeUnpublished, null, true, null, null, null, null); // --> 8\r
Assert.assertEquals("Expecting 8 entities", 8, pager.getCount().intValue());\r
}\r
\r
public final void testFindByDescriptionElementFullText_CommonName() throws IOException,\r
LuceneParseException {\r
\r
+ TaxonNode subtree = null;\r
refreshLuceneIndex();\r
\r
- Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Wei"+UTF8.SHARP_S+"tanne", null, null, null,\r
+ Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Wei"+UTF8.SHARP_S+"tanne",\r
+ null, subtree, null, null,\r
false, null, null, null, null);\r
Assert.assertEquals("Expecting one entity when searching for CommonTaxonName", 1,\r
pager.getCount().intValue());\r
\r
// the description containing the Nulltanne has no taxon attached,\r
// taxon.id = null\r
- pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Nulltanne", null, null, null,\r
+ pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Nulltanne", null, subtree, null, null,\r
false, null, null, null, null);\r
Assert.assertEquals("Expecting no entity when searching for 'Nulltanne' ", 0, pager.getCount().intValue());\r
\r
- pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Wei"+UTF8.SHARP_S+"tanne", null, null,\r
+ pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Wei"+UTF8.SHARP_S+"tanne", null, subtree, null,\r
Arrays.asList(new Language[] { Language.GERMAN() }), false, null, null, null, null);\r
Assert.assertEquals("Expecting one entity when searching in German", 1, pager.getCount().intValue());\r
\r
- pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Wei"+UTF8.SHARP_S+"tanne", null, null,\r
+ pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Wei"+UTF8.SHARP_S+"tanne", null, subtree, null,\r
Arrays.asList(new Language[] { Language.RUSSIAN() }), false, null, null, null, null);\r
Assert.assertEquals("Expecting no entity when searching in Russian", 0, pager.getCount().intValue());\r
\r
public final void testFindByDescriptionElementFullText_Distribution() throws IOException, LuceneParseException {\r
\r
refreshLuceneIndex();\r
+ TaxonNode subtree = null;\r
\r
// by Area\r
- Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(null, "Canada", null, null, null, false, null, null, null, null);\r
+ Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(null, "Canada", null, subtree, null, null, false, null, null, null, null);\r
Assert.assertEquals("Expecting one entity when searching for arae 'Canada'", 1, pager.getCount().intValue());\r
// by Status\r
- pager = taxonService.findByDescriptionElementFullText(null, "present", null, null, null, false, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(null, "present", null, subtree, null, null, false, null, null, null, null);\r
Assert.assertEquals("Expecting one entity when searching for status 'present'", 1, pager.getCount().intValue());\r
}\r
\r
public final void testFindByDescriptionElementFullText_wildcard() throws IOException, LuceneParseException {\r
\r
refreshLuceneIndex();\r
+ TaxonNode subtree = null;\r
\r
- Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Wei"+UTF8.SHARP_S+"*", null, null, null, false, null, null, null, null);\r
+ Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Wei"+UTF8.SHARP_S+"*", null, subtree, null, null, false, null, null, null, null);\r
Assert.assertEquals("Expecting one entity when searching for CommonTaxonName", 1, pager.getCount().intValue());\r
}\r
\r
@DataSet\r
public final void testFindByDescriptionElementFullText_TooManyClauses() throws IOException, LuceneParseException {\r
\r
+ TaxonNode subtree = null;\r
+\r
// generate 1024 terms to reproduce the bug\r
- TaxonDescription description = (TaxonDescription) descriptionService.find(D_ABIES_ALBA_UUID);\r
+ TaxonDescription description = (TaxonDescription) descriptionService.find(DESC_ABIES_ALBA_UUID);\r
Set<String> uniqueRandomStrs = new HashSet<>(1024);\r
while(uniqueRandomStrs.size() < 1024){\r
uniqueRandomStrs.add(RandomStringUtils.random(10, true, false));\r
\r
refreshLuceneIndex();\r
\r
- Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Rot*", null, null, null, false, null, null, null, null);\r
+ Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Rot*", null, subtree, null, null, false, null, null, null, null);\r
Assert.assertEquals("Expecting all 1024 entities grouped into one SearchResult item when searching for Rot*", 1, pager.getCount().intValue());\r
}\r
\r
@DataSet(loadStrategy=CleanSweepInsertLoadStrategy.class)\r
public final void testFullText_Paging() throws IOException, LuceneParseException {\r
\r
+ TaxonNode subtree = null;\r
Reference sec = ReferenceFactory.newDatabase();\r
referenceService.save(sec);\r
\r
\r
int pageSize = 10;\r
\r
- Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Rot*", null, null, null, false, pageSize, null, null, null);\r
+ Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Rot*", null, subtree, null, null, false, pageSize, null, null, null);\r
Assert.assertEquals("unexpeted number of pages", Integer.valueOf(numOfItems / pageSize), pager.getPagesAvailable());\r
- pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Rot*", null, null, null, false, pageSize, 9, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Rot*", null, subtree, null, null, false, pageSize, 9, null, null);\r
Assert.assertNotNull("last page must have records", pager.getRecords());\r
Assert.assertNotNull("last item on last page must exist", pager.getRecords().get(0));\r
- pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Rot*", null, null, null, false, pageSize, 10, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Rot*", null, subtree, null, null, false, pageSize, 10, null, null);\r
Assert.assertNotNull("last page + 1 must not have any records", pager.getRecords());\r
}\r
\r
@Ignore // test fails, maybe the assumptions made here are not compatible with the lucene scoring mechanism see http://lucene.apache.org/core/3_6_1/scoring.html\r
public final void testFullText_ScoreAndOrder_1() throws IOException, LuceneParseException {\r
\r
+ TaxonNode subtree = null;\r
int numOfTaxa = 3;\r
\r
UUID[] taxonUuids = new UUID[numOfTaxa];\r
commitAndStartNewTransaction(null);\r
refreshLuceneIndex();\r
\r
- Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Rot", null, null, null, false, null, null, null, null);\r
+ Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Rot", null, subtree, null, null, false, null, null, null, null);\r
for(int i = 0; i < numOfTaxa; i++){\r
Assert.assertEquals("taxa should be orderd by relevance (= score)", taxonUuids[numOfTaxa - i - 1], pager.getRecords().get(i).getEntity().getUuid());\r
}\r
@Ignore // test fails, maybe the assumptions made here are not compatible with the lucene scoring mechanism see http://lucene.apache.org/core/3_6_1/scoring.html\r
public final void testFullText_ScoreAndOrder_2() throws IOException, LuceneParseException {\r
\r
+ TaxonNode subtree = null;\r
int numOfTaxa = 3;\r
\r
UUID[] taxonUuids = new UUID[numOfTaxa];\r
commitAndStartNewTransaction(null);\r
refreshLuceneIndex();\r
\r
- Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Rot", null, null, null, false, null, null, null, null);\r
+ Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Rot", null, subtree, null, null, false, null, null, null, null);\r
for(int i = 0; i < numOfTaxa; i++){\r
Assert.assertEquals("taxa should be orderd by relevance (= score)", taxonUuids[numOfTaxa - i - 1], pager.getRecords().get(i).getEntity().getUuid());\r
}\r
@DataSet\r
public final void testFullText_Grouping() throws IOException, LuceneParseException, LuceneMultiSearchException {\r
\r
- TaxonDescription description = (TaxonDescription) descriptionService.find(D_ABIES_ALBA_UUID);\r
+ TaxonNode subtree = null;\r
+ TaxonDescription description = (TaxonDescription) descriptionService.find(DESC_ABIES_ALBA_UUID);\r
Set<String> uniqueRandomStrs = new HashSet<>(1024);\r
int numOfItems = 100;\r
while(uniqueRandomStrs.size() < numOfItems){\r
boolean highlightFragments = true;\r
\r
// test with findByDescriptionElementFullText\r
- Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Rot*", null, null, null, highlightFragments, pageSize, null, null, null);\r
+ Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Rot*", null, subtree, null, null, highlightFragments, pageSize, null, null, null);\r
logFreeTextSearchResults(pager, Level.DEBUG, null);\r
Assert.assertEquals("All matches should be grouped into a single SearchResult element", 1, pager.getRecords().size());\r
Assert.assertEquals("The count property of the pager must be set correctly", 1, pager.getCount().intValue());\r
Assert.assertEquals("expecting 10 highlighted fragments of field 'name'", maxDocsPerGroup, highlightMap.get("name").length);\r
\r
// test with findByEverythingFullText\r
- pager = taxonService.findByEverythingFullText( "Rot*", null, includeUnpublished, null, highlightFragments, pageSize, null, null, null);\r
+ pager = taxonService.findByEverythingFullText( "Rot*", null, subtree, includeUnpublished, null, highlightFragments, pageSize, null, null, null);\r
logFreeTextSearchResults(pager, Level.DEBUG, null);\r
Assert.assertEquals("All matches should be grouped into a single SearchResult element", 1, pager.getRecords().size());\r
Assert.assertEquals("The count property of the pager must be set correctly", 1, pager.getCount().intValue());\r
public final void testFindByDescriptionElementFullText_TextData() throws IOException, LuceneParseException {\r
\r
refreshLuceneIndex();\r
+ TaxonNode subtree = null;\r
\r
- Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Abies", null, null, null, false, null, null, null, null);\r
+ Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Abies", null, subtree, null, null, false, null, null, null, null);\r
logFreeTextSearchResults(pager, Level.DEBUG, null);\r
Assert.assertEquals("Expecting one entity when searching for any TextData", 1, pager.getCount().intValue());\r
Assert.assertEquals("Abies balsamea sec. Kohlbecker, A., Testcase standart views, 2013", pager.getRecords().get(0).getEntity().getTitleCache());\r
Assert.assertEquals("Abies balsamea sec. Kohlbecker, A., Testcase standart views, 2013", pager.getRecords().get(0).getDocs().iterator().next().get("inDescription.taxon.titleCache"));\r
\r
\r
- pager = taxonService.findByDescriptionElementFullText(null, "Abies", null, null, null, false, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(null, "Abies", null, subtree, null, null, false, null, null, null, null);\r
Assert.assertEquals("Expecting one entity when searching for any type", 1, pager.getCount().intValue());\r
\r
- pager = taxonService.findByDescriptionElementFullText(null, "Abies", null, Arrays.asList(new Feature[]{Feature.UNKNOWN()}), null, false, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(null, "Abies", null, subtree, Arrays.asList(new Feature[]{Feature.UNKNOWN()}), null, false, null, null, null, null);\r
Assert.assertEquals("Expecting one entity when searching for any type and for Feature DESCRIPTION", 1, pager.getCount().intValue());\r
\r
- pager = taxonService.findByDescriptionElementFullText(null, "Abies", null, Arrays.asList(new Feature[]{Feature.CHROMOSOME_NUMBER()}), null, false, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(null, "Abies", null, subtree, Arrays.asList(new Feature[]{Feature.CHROMOSOME_NUMBER()}), null, false, null, null, null, null);\r
Assert.assertEquals("Expecting no entity when searching for any type and for Feature CHROMOSOME_NUMBER", 0, pager.getCount().intValue());\r
\r
- pager = taxonService.findByDescriptionElementFullText(null, "Abies", null, Arrays.asList(new Feature[]{Feature.CHROMOSOME_NUMBER(), Feature.UNKNOWN()}), null, false, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(null, "Abies", null, subtree, Arrays.asList(new Feature[]{Feature.CHROMOSOME_NUMBER(), Feature.UNKNOWN()}), null, false, null, null, null, null);\r
Assert.assertEquals("Expecting no entity when searching for any type and for Feature DESCRIPTION or CHROMOSOME_NUMBER", 1, pager.getCount().intValue());\r
\r
- pager = taxonService.findByDescriptionElementFullText(Distribution.class, "Abies", null, null, null, false, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(Distribution.class, "Abies", null, subtree, null, null, false, null, null, null, null);\r
Assert.assertEquals("Expecting no entity when searching for Distribution", 0, pager.getCount().intValue());\r
\r
- pager = taxonService.findByDescriptionElementFullText(TextData.class, "Бальзам", null, null, Arrays.asList(new Language[]{}), false, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(TextData.class, "Бальзам", null, subtree, null, Arrays.asList(new Language[]{}), false, null, null, null, null);\r
Assert.assertEquals("Expecting one entity", 1, pager.getCount().intValue());\r
Assert.assertEquals("Abies balsamea sec. Kohlbecker, A., Testcase standart views, 2013", pager.getRecords().get(0).getEntity().getTitleCache());\r
\r
- pager = taxonService.findByDescriptionElementFullText(TextData.class, "Бальзам", null, null, Arrays.asList(new Language[]{Language.RUSSIAN()}), false, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(TextData.class, "Бальзам", null, subtree, null, Arrays.asList(new Language[]{Language.RUSSIAN()}), false, null, null, null, null);\r
Assert.assertEquals("Expecting one entity", 1, pager.getCount().intValue());\r
Assert.assertEquals("Abies balsamea sec. Kohlbecker, A., Testcase standart views, 2013", pager.getRecords().get(0).getEntity().getTitleCache());\r
\r
- pager = taxonService.findByDescriptionElementFullText(TextData.class, "Бальзам", null, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(TextData.class, "Бальзам", null, subtree, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
Assert.assertEquals("Expecting no entity", 0, pager.getCount().intValue());\r
\r
- pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne", null, null, Arrays.asList(new Language[]{Language.GERMAN(), Language.RUSSIAN()}), false, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne", null, subtree, null, Arrays.asList(new Language[]{Language.GERMAN(), Language.RUSSIAN()}), false, null, null, null, null);\r
Assert.assertEquals("Expecting one entity", 1, pager.getCount().intValue());\r
Assert.assertEquals("Abies balsamea sec. Kohlbecker, A., Testcase standart views, 2013", pager.getRecords().get(0).getEntity().getTitleCache());\r
}\r
public final void testFindByDescriptionElementFullText_MultipleWords() throws IOException, LuceneParseException {\r
\r
refreshLuceneIndex();\r
+ TaxonNode subtree = null;\r
\r
// Pflanzenart aus der Gattung der Tannen\r
long start = System.currentTimeMillis();\r
\r
- Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Pflanzenart Tannen", null, null, null, false, null, null, null, null);\r
+ Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Pflanzenart Tannen", null, subtree, null, null, false, null, null, null, null);\r
Assert.assertEquals("OR search : Expecting one entity", 1, pager.getCount().intValue());\r
\r
- pager = taxonService.findByDescriptionElementFullText(TextData.class, "Pflanzenart Wespen", null, null, null, false, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(TextData.class, "Pflanzenart Wespen", null, subtree, null, null, false, null, null, null, null);\r
Assert.assertEquals("OR search : Expecting one entity", 1, pager.getCount().intValue());\r
\r
- pager = taxonService.findByDescriptionElementFullText(TextData.class, "+Pflanzenart +Tannen", null, null, null, false, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(TextData.class, "+Pflanzenart +Tannen", null, subtree, null, null, false, null, null, null, null);\r
Assert.assertEquals("AND search : Expecting one entity", 1, pager.getCount().intValue());\r
\r
- pager = taxonService.findByDescriptionElementFullText(TextData.class, "+Pflanzenart +Wespen", null, null, null, false, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(TextData.class, "+Pflanzenart +Wespen", null, subtree, null, null, false, null, null, null, null);\r
Assert.assertEquals("AND search : Expecting no entity", 0, pager.getCount().intValue());\r
\r
- pager = taxonService.findByDescriptionElementFullText(TextData.class, "\"Pflanzenart aus der Gattung der Tannen\"", null, null, null, false, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(TextData.class, "\"Pflanzenart aus der Gattung der Tannen\"", null, subtree, null, null, false, null, null, null, null);\r
Assert.assertEquals("Phrase search : Expecting one entity", 1, pager.getCount().intValue());\r
\r
- pager = taxonService.findByDescriptionElementFullText(TextData.class, "\"Pflanzenart aus der Gattung der Wespen\"", null, null, null, false, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(TextData.class, "\"Pflanzenart aus der Gattung der Wespen\"", null, subtree, null, null, false, null, null, null, null);\r
Assert.assertEquals("Phrase search : Expecting one entity", 0, pager.getCount().intValue());\r
\r
logger.info("testFindByDescriptionElementFullText_MultipleWords() duration: " + (System.currentTimeMillis() - start) + "ms");\r
public final void testFindByDescriptionElementFullText_modify_DescriptionElement() throws IOException, LuceneParseException {\r
\r
refreshLuceneIndex();\r
+ TaxonNode subtree = null;\r
\r
//\r
// modify the DescriptionElement\r
- Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne", null, null, Arrays.asList(new Language[]{Language.GERMAN(), Language.RUSSIAN()}), false, null, null, null, null);\r
+ Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne", null, subtree, null, Arrays.asList(new Language[]{Language.GERMAN(), Language.RUSSIAN()}), false, null, null, null, null);\r
Assert.assertTrue("Search did not return any results", pager.getRecords().size() > 0);\r
Assert.assertTrue("Expecting only one doc", pager.getRecords().get(0).getDocs().size() == 1);\r
Document indexDocument = pager.getRecords().get(0).getDocs().iterator().next();\r
// );\r
\r
//\r
- pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne", null, null, Arrays.asList(new Language[]{Language.GERMAN(), Language.RUSSIAN()}), false, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne", null, subtree, null, Arrays.asList(new Language[]{Language.GERMAN(), Language.RUSSIAN()}), false, null, null, null, null);\r
Assert.assertEquals("The german 'Balsam-Tanne' TextData should no longer be indexed", 0, pager.getCount().intValue());\r
- pager = taxonService.findByDescriptionElementFullText(TextData.class, "abeto", null, null, Arrays.asList(new Language[]{Language.SPANISH_CASTILIAN()}), false, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(TextData.class, "abeto", null, subtree, null, Arrays.asList(new Language[]{Language.SPANISH_CASTILIAN()}), false, null, null, null, null);\r
Assert.assertEquals("expecting to find the SPANISH_CASTILIAN 'abeto bals"+UTF8.SMALL_A_ACUTE+"mico'", 1, pager.getCount().intValue());\r
- pager = taxonService.findByDescriptionElementFullText(TextData.class, "bals"+UTF8.SMALL_A_ACUTE+"mico", null, null, null, false, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(TextData.class, "bals"+UTF8.SMALL_A_ACUTE+"mico", null, subtree, null, null, false, null, null, null, null);\r
Assert.assertEquals("expecting to find the SPANISH_CASTILIAN 'abeto bals"+UTF8.SMALL_A_ACUTE+"mico'", 1, pager.getCount().intValue());\r
\r
//\r
}\r
descriptionService.saveOrUpdate(description);\r
commitAndStartNewTransaction(null);\r
- pager = taxonService.findByDescriptionElementFullText(TextData.class, "abeto", null, null, Arrays.asList(new Language[]{Language.SPANISH_CASTILIAN()}), false, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(TextData.class, "abeto", null, subtree, null, Arrays.asList(new Language[]{Language.SPANISH_CASTILIAN()}), false, null, null, null, null);\r
Assert.assertEquals("The spanish 'abeto bals"+UTF8.SMALL_A_ACUTE+"mico' TextData should no longer be indexed", 0, pager.getCount().intValue());\r
- pager = taxonService.findByDescriptionElementFullText(TextData.class, "balsamiczna", null, null, Arrays.asList(new Language[]{Language.POLISH()}), false, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(TextData.class, "balsamiczna", null, subtree, null, Arrays.asList(new Language[]{Language.POLISH()}), false, null, null, null, null);\r
Assert.assertEquals("expecting to find the POLISH 'Jod"+UTF8.POLISH_L+"a balsamiczna'", 1, pager.getCount().intValue());\r
}\r
\r
public final void testFindByDescriptionElementFullText_modify_Taxon() throws IOException, LuceneParseException {\r
\r
refreshLuceneIndex();\r
+ TaxonNode subtree = null;\r
\r
Taxon t_abies_balsamea = (Taxon)taxonService.find(ABIES_BALSAMEA_UUID);\r
- TaxonDescription d_abies_balsamea = (TaxonDescription)descriptionService.find(D_ABIES_BALSAMEA_UUID);\r
+ TaxonDescription d_abies_balsamea = (TaxonDescription)descriptionService.find(DESC_ABIES_BALSAMEA_UUID);\r
\r
- Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne", null, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
+ Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne",\r
+ null, subtree, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
Assert.assertEquals("expecting to find the GERMAN 'Balsam-Tanne'", 1, pager.getCount().intValue());\r
\r
// exchange the Taxon with another one via the Taxon object\r
\r
t_abies_balsamea = (Taxon)taxonService.find(t_abies_balsamea.getUuid());\r
\r
- pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne", null, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne",\r
+ null, subtree, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
Assert.assertEquals("'Balsam-Tanne' should no longer be found", 0, pager.getCount().intValue());\r
\r
// 2.) create new description and add to taxon:\r
// "DESCRIPTIONBASE"\r
// });\r
\r
- pager = taxonService.findByDescriptionElementFullText(TextData.class, "mittelgro"+UTF8.SHARP_S+"er Baum", null, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(TextData.class, "mittelgro"+UTF8.SHARP_S+"er Baum",\r
+ null, subtree, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
Assert.assertEquals("the taxon should be found via the new Description", 1, pager.getCount().intValue());\r
}\r
\r
public final void testFindByDescriptionElementFullText_modify_Classification() throws IOException, LuceneParseException {\r
\r
refreshLuceneIndex();\r
+ TaxonNode subtree = null;\r
\r
// put taxon into other classification, new taxon node\r
Classification classification = classificationService.find(CLASSIFICATION_UUID);\r
// TODO: why is the test failing when the childNode is already retrieved here, and not after the following four lines?\r
//TaxonNode childNode = classification.getChildNodes().iterator().next();\r
\r
- Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne", null, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
+ Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne", null, subtree, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
Assert.assertEquals("expecting to find the GERMAN 'Balsam-Tanne' even if filtering by classification", 1, pager.getCount().intValue());\r
- pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne", alternateClassification, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne", alternateClassification, subtree, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
Assert.assertEquals("GERMAN 'Balsam-Tanne' should NOT be found in other classification", 0, pager.getCount().intValue());\r
\r
// check for the right taxon node\r
\r
// reload classification\r
classification = classificationService.find(CLASSIFICATION_UUID);\r
- pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne", alternateClassification, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne",\r
+ alternateClassification, subtree, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
Assert.assertEquals("GERMAN 'Balsam-Tanne' should now be found in other classification", 1, pager.getCount().intValue());\r
\r
classification.getChildNodes().clear();\r
classificationService.saveOrUpdate(classification);\r
commitAndStartNewTransaction(null);\r
\r
- pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne", classification, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(TextData.class, "Balsam-Tanne",\r
+ classification, subtree, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
Assert.assertEquals("Now the GERMAN 'Balsam-Tanne' should NOT be found in original classification", 0, pager.getCount().intValue());\r
\r
}\r
@DataSet\r
public final void testFindByDescriptionElementFullText_CategoricalData() throws IOException, LuceneParseException {\r
\r
+ TaxonNode subtree = null;\r
// add CategoricalData\r
- DescriptionBase d_abies_balsamea = descriptionService.find(D_ABIES_BALSAMEA_UUID);\r
+ DescriptionBase d_abies_balsamea = descriptionService.find(DESC_ABIES_BALSAMEA_UUID);\r
// Categorical data\r
CategoricalData cdata = CategoricalData.NewInstance();\r
cdata.setFeature(Feature.DESCRIPTION());\r
\r
refreshLuceneIndex();\r
\r
- Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(CategoricalData.class, "green", null, null, null, false, null, null, null, null);\r
+ Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(CategoricalData.class, "green", null, subtree, null, null, false, null, null, null, null);\r
Assert.assertEquals("Expecting one entity", 1, pager.getCount().intValue());\r
Assert.assertEquals("Abies balsamea sec. Kohlbecker, A., Testcase standart views, 2013", pager.getRecords().get(0).getEntity().getTitleCache());\r
Assert.assertTrue("Expecting only one doc", pager.getRecords().get(0).getDocs().size() == 1);\r
public final void testFindByDescriptionElementFullText_Highlighting() throws IOException, LuceneParseException {\r
\r
refreshLuceneIndex();\r
+ TaxonNode subtree = null;\r
\r
- Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Abies", null, null, null, true, null, null, null, null);\r
+ Pager<SearchResult<TaxonBase>> pager = taxonService.findByDescriptionElementFullText(TextData.class, "Abies", null, subtree, null, null, true, null, null, null, null);\r
Assert.assertEquals("Expecting one entity when searching for any TextData", 1, pager.getCount().intValue());\r
SearchResult<TaxonBase> searchResult = pager.getRecords().get(0);\r
Assert.assertTrue("the map of highlighted fragments should contain at least one item", searchResult.getFieldHighlightMap().size() > 0);\r
String[] fragments = searchResult.getFieldHighlightMap().values().iterator().next();\r
Assert.assertTrue("first fragments should contains serch term", fragments[0].contains("<B>Abies</B>"));\r
\r
- pager = taxonService.findByDescriptionElementFullText(TextData.class, "Pflanzenart Tannen", null, null, null, true, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(TextData.class, "Pflanzenart Tannen", null, subtree, null, null, true, null, null, null, null);\r
searchResult = pager.getRecords().get(0);\r
Assert.assertTrue("Phrase search : Expecting at least one item in highlighted fragments", searchResult.getFieldHighlightMap().size() > 0);\r
fragments = searchResult.getFieldHighlightMap().values().iterator().next();\r
Assert.assertTrue("first fragments should contains serch term", fragments[0].contains("<B>Pflanzenart</B>") || fragments[0].contains("<B>Tannen</B>"));\r
\r
- pager = taxonService.findByDescriptionElementFullText(TextData.class, "+Pflanzenart +Tannen", null, null, null, true, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(TextData.class, "+Pflanzenart +Tannen", null, subtree, null, null, true, null, null, null, null);\r
searchResult = pager.getRecords().get(0);\r
Assert.assertTrue("Phrase search : Expecting at least one item in highlighted fragments", searchResult.getFieldHighlightMap().size() > 0);\r
fragments = searchResult.getFieldHighlightMap().values().iterator().next();\r
Assert.assertTrue("first fragments should contains serch term", fragments[0].contains("<B>Pflanzenart</B>") && fragments[0].contains("<B>Tannen</B>"));\r
\r
- pager = taxonService.findByDescriptionElementFullText(TextData.class, "\"Pflanzenart aus der Gattung der Tannen\"", null, null, null, true, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(TextData.class, "\"Pflanzenart aus der Gattung der Tannen\"", null, subtree, null, null, true, null, null, null, null);\r
searchResult = pager.getRecords().get(0);\r
Assert.assertTrue("Phrase search : Expecting at least one item in highlighted fragments", searchResult.getFieldHighlightMap().size() > 0);\r
fragments = searchResult.getFieldHighlightMap().values().iterator().next();\r
Assert.assertTrue("first fragments should contains serch term", fragments[0].contains("<B>Pflanzenart</B> <B>aus</B> <B>der</B> <B>Gattung</B> <B>der</B> <B>Tannen</B>"));\r
\r
- pager = taxonService.findByDescriptionElementFullText(TextData.class, "Gatt*", null, null, null, true, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(TextData.class, "Gatt*", null, subtree, null, null, true, null, null, null, null);\r
searchResult = pager.getRecords().get(0);\r
Assert.assertTrue("Wildcard search : Expecting at least one item in highlighted fragments", searchResult.getFieldHighlightMap().size() > 0);\r
fragments = searchResult.getFieldHighlightMap().values().iterator().next();\r
refreshLuceneIndex();\r
\r
classificationService.find(CLASSIFICATION_UUID);\r
+ TaxonNode subtree = null;\r
\r
boolean NO_UNPUBLISHED = false;\r
\r
- Pager<SearchResult<TaxonBase>> pager = taxonService.findByFullText(null, "Abies", null, includeUnpublished, null, true, null, null, null, null); // --> 7\r
+ Pager<SearchResult<TaxonBase>> pager = taxonService.findByFullText(null, "Abies", null, subtree, includeUnpublished, null, true, null, null, null, null); // --> 7\r
// logFreeTextSearchResults(pager, Level.DEBUG, null);\r
Assert.assertEquals("Expecting 8 entities", 8, pager.getCount().intValue());\r
\r
+ //subtree\r
+ subtree = nodeService.find(ROOTNODE_CLASSIFICATION_5000);\r
+ pager = taxonService.findByFullText(null, "Abies", null, subtree, includeUnpublished, null, true, null, null, null, null); // --> 2\r
+ Assert.assertEquals("Expecting 2 entities", 2, pager.getCount().intValue());\r
+ subtree = null;\r
\r
- pager = taxonService.findByFullText(null, "Abies", null, NO_UNPUBLISHED, null, true, null, null, null, null); // --> 7\r
+ pager = taxonService.findByFullText(null, "Abies", null, subtree, NO_UNPUBLISHED, null, true, null, null, null, null); // --> 7\r
// logFreeTextSearchResults(pager, Level.DEBUG, null);\r
Assert.assertEquals("Expecting 6 entities", 6, pager.getCount().intValue());\r
Synonym abiesSubalpina = (Synonym)taxonService.find(ABIES_SUBALPINA_UUID);\r
//accepted published, syn not published\r
abiesSubalpina.getAcceptedTaxon().setPublish(true);\r
commitAndStartNewTransaction();\r
- pager = taxonService.findByFullText(null, "Abies", null, NO_UNPUBLISHED, null, true, null, null, null, null); // --> 7\r
+ pager = taxonService.findByFullText(null, "Abies", null, subtree, NO_UNPUBLISHED, null, true, null, null, null, null); // --> 7\r
Assert.assertEquals("Expecting 7 entities", 7, pager.getCount().intValue());\r
\r
//accepted published, syn published\r
abiesSubalpina = (Synonym)taxonService.find(abiesSubalpina.getUuid());\r
abiesSubalpina.setPublish(true);\r
commitAndStartNewTransaction();\r
- pager = taxonService.findByFullText(null, "Abies", null, NO_UNPUBLISHED, null, true, null, null, null, null); // --> 7\r
+ pager = taxonService.findByFullText(null, "Abies", null, subtree, NO_UNPUBLISHED, null, true, null, null, null, null); // --> 7\r
Assert.assertEquals("Expecting 8 entities", 8, pager.getCount().intValue());\r
\r
//accepted not published, syn published\r
abiesSubalpina = (Synonym)taxonService.find(abiesSubalpina.getUuid());\r
abiesSubalpina.getAcceptedTaxon().setPublish(false);\r
commitAndStartNewTransaction();\r
- pager = taxonService.findByFullText(null, "Abies", null, NO_UNPUBLISHED, null, true, null, null, null, null); // --> 7\r
+ pager = taxonService.findByFullText(null, "Abies", null, subtree, NO_UNPUBLISHED, null, true, null, null, null, null); // --> 7\r
Assert.assertEquals("Expecting 6 entities. Synonym and accepted should not be found, though synonym is published",\r
6, pager.getCount().intValue());\r
\r
- pager = taxonService.findByFullText(Taxon.class, "Abies", null, includeUnpublished, null, true, null, null, null, null); // --> 6\r
+ pager = taxonService.findByFullText(Taxon.class, "Abies", null, subtree, includeUnpublished, null, true, null, null, null, null); // --> 6\r
Assert.assertEquals("Expecting 7 entities", 7, pager.getCount().intValue());\r
\r
- pager = taxonService.findByFullText(Synonym.class, "Abies", null, includeUnpublished, null, true, null, null, null, null); // --> 1\r
+ pager = taxonService.findByFullText(Synonym.class, "Abies", null, subtree, includeUnpublished, null, true, null, null, null, null); // --> 1\r
Assert.assertEquals("Expecting 1 entity", 1, pager.getCount().intValue());\r
- pager = taxonService.findByFullText(Synonym.class, "Abies", null, NO_UNPUBLISHED, null, true, null, null, null, null); // --> 1\r
+ pager = taxonService.findByFullText(Synonym.class, "Abies", null, subtree, NO_UNPUBLISHED, null, true, null, null, null, null); // --> 1\r
Assert.assertEquals("Expecting 0 entity", 0, pager.getCount().intValue());\r
\r
- pager = taxonService.findByFullText(TaxonBase.class, "sec", null, includeUnpublished, null, true, null, null, null, null); // --> 7\r
+ pager = taxonService.findByFullText(TaxonBase.class, "sec", null, subtree, includeUnpublished, null, true, null, null, null, null); // --> 7\r
Assert.assertEquals("Expecting 8 entities", 9, pager.getCount().intValue());\r
\r
- pager = taxonService.findByFullText(null, "genus", null, includeUnpublished, null, true, null, null, null, null); // --> 1\r
+ pager = taxonService.findByFullText(null, "genus", null, subtree, includeUnpublished, null, true, null, null, null, null); // --> 1\r
Assert.assertEquals("Expecting 1 entity", 1, pager.getCount().intValue());\r
\r
- pager = taxonService.findByFullText(Taxon.class, "subalpina", null, includeUnpublished, null, true, null, null, null, null); // --> 0\r
+ pager = taxonService.findByFullText(Taxon.class, "subalpina", null, subtree, includeUnpublished, null, true, null, null, null, null); // --> 0\r
Assert.assertEquals("Expecting 0 entities", 0, pager.getCount().intValue());\r
\r
+\r
// synonym in classification ???\r
}\r
\r
areaFilter.add(canada);\r
areaFilter.add(russia);\r
\r
- Pager<SearchResult<TaxonBase>> pager = taxonService.findByDistribution(areaFilter, statusFilter, null, 20, 0, null, null);\r
+ Pager<SearchResult<TaxonBase>> pager = taxonService.findByDistribution(areaFilter, statusFilter, null, null, 20, 0, null, null);\r
Assert.assertEquals("Expecting 2 entities", Integer.valueOf(2), Integer.valueOf(pager.getRecords().size()));\r
\r
}\r
\r
+ /**\r
+  * Tests taxon/synonym full-text search filtered by classification and by subtree.\r
+  * Expected counts: unfiltered taxa+synonyms = 8; within the classification (and\r
+  * within its root subtree) taxa+synonyms = 2, of which 1 accepted taxon and 1 synonym.\r
+  */\r
+ @Test\r
+ @DataSet\r
+ public final void testFindTaxaAndNamesByFullText_synonymClassificationSubtree() throws IOException, LuceneParseException, LuceneMultiSearchException {\r
+\r
+ refreshLuceneIndex();\r
+ Classification classification = null;\r
+ TaxonNode subtree = null;\r
+\r
+ //no filter: all taxa and synonyms, including unpublished\r
+ Pager<SearchResult<TaxonBase>> pager;\r
+ EnumSet<TaxaAndNamesSearchMode> taxaAndSynonyms = EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished);\r
+ EnumSet<TaxaAndNamesSearchMode> taxaOnly = EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.includeUnpublished);\r
+ EnumSet<TaxaAndNamesSearchMode> synonymsOnly = EnumSet.of(TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished);\r
+ pager = taxonService.findTaxaAndNamesByFullText(\r
+ taxaAndSynonyms, "Abies", classification, subtree, null, null, null, true, null, null, null, null);\r
+ Assert.assertEquals("doTaxa & doSynonyms & unpublished", 8, pager.getCount().intValue());\r
+\r
+ //classification\r
+ classification = classificationService.find(CLASSIFICATION_UUID);\r
+ pager = taxonService.findTaxaAndNamesByFullText(\r
+ taxaAndSynonyms, "Abies", classification, subtree, null, null, null, true, null, null, null, null);\r
+ Assert.assertEquals("doTaxa & doSynonyms & unpublished", 2, pager.getCount().intValue());\r
+ //taxa only\r
+ pager = taxonService.findTaxaAndNamesByFullText(\r
+ taxaOnly, "Abies", classification, subtree, null, null, null, true, null, null, null, null);\r
+ Assert.assertEquals("doTaxa & unpublished", 1, pager.getCount().intValue());\r
+ //synonyms only\r
+ pager = taxonService.findTaxaAndNamesByFullText(\r
+ synonymsOnly, "Abies", classification, subtree, null, null, null, true, null, null, null, null);\r
+ Assert.assertEquals("doSynonyms & unpublished", 1, pager.getCount().intValue());\r
+\r
+ classification = null;\r
+\r
+ //subtree\r
+ subtree = nodeService.find(ROOTNODE_CLASSIFICATION_5000);\r
+ pager = taxonService.findTaxaAndNamesByFullText(\r
+ taxaAndSynonyms, "Abies", classification, subtree, null, null, null, true, null, null, null, null);\r
+ Assert.assertEquals("doTaxa & doSynonyms & unpublished", 2, pager.getCount().intValue());\r
+ //taxa only\r
+ pager = taxonService.findTaxaAndNamesByFullText(\r
+ taxaOnly, "Abies", classification, subtree, null, null, null, true, null, null, null, null);\r
+ Assert.assertEquals("doTaxa & unpublished", 1, pager.getCount().intValue());\r
+ subtree = null;\r
+\r
+ }\r
+\r
@Test\r
@DataSet\r
public final void testFindTaxaAndNamesByFullText() throws IOException, LuceneParseException, LuceneMultiSearchException {\r
\r
refreshLuceneIndex();\r
+ TaxonNode subtree = null;\r
\r
Classification alternateClassification = classificationService.find(CLASSIFICATION_ALT_UUID);\r
Synonym abiesSubalpina = (Synonym)taxonService.find(ABIES_SUBALPINA_UUID);\r
\r
Pager<SearchResult<TaxonBase>> pager;\r
- pager = taxonService.findTaxaAndNamesByFullText(\r
- EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished),\r
- "Abies", null, null, null, null, true, null, null, null, null);\r
-// logPagerRecords(pager, Level.DEBUG);\r
+ EnumSet<TaxaAndNamesSearchMode> modes = EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished);\r
+ pager = taxonService.findTaxaAndNamesByFullText(\r
+ modes, "Abies", null, subtree, null, null, null, true, null, null, null, null);\r
Assert.assertEquals("doTaxa & doSynonyms & unpublished", 8, pager.getCount().intValue());\r
+// logPagerRecords(pager, Level.DEBUG);\r
+\r
+ //unpublished\r
pager = taxonService.findTaxaAndNamesByFullText(TaxaAndNamesSearchMode.taxaAndSynonyms(),\r
- "Abies", null, null, null, null, true, null, null, null, null);\r
+ "Abies", null, subtree, null, null, null, true, null, null, null, null);\r
Assert.assertEquals("doTaxa & doSynonyms, published only", 6, pager.getCount().intValue());\r
\r
//accepted published, syn not published\r
abiesSubalpina.getAcceptedTaxon().setPublish(true);\r
commitAndStartNewTransaction();\r
pager = taxonService.findTaxaAndNamesByFullText(TaxaAndNamesSearchMode.taxaAndSynonyms(),\r
- "Abies", null, null, null, null, true, null, null, null, null);\r
+ "Abies", null, subtree, null, null, null, true, null, null, null, null);\r
Assert.assertEquals("doTaxa & doSynonyms, accepted published", 7, pager.getCount().intValue());\r
\r
//accepted published, syn published\r
abiesSubalpina.setPublish(true);\r
commitAndStartNewTransaction();\r
pager = taxonService.findTaxaAndNamesByFullText(TaxaAndNamesSearchMode.taxaAndSynonyms(),\r
- "Abies", null, null, null, null, true, null, null, null, null);\r
+ "Abies", null, subtree, null, null, null, true, null, null, null, null);\r
Assert.assertEquals("Expecting 8 entities", 8, pager.getCount().intValue());\r
\r
//accepted not published, syn published\r
abiesSubalpina.getAcceptedTaxon().setPublish(false);\r
commitAndStartNewTransaction();\r
pager = taxonService.findTaxaAndNamesByFullText(TaxaAndNamesSearchMode.taxaAndSynonyms(),\r
- "Abies", null, null, null, null, true, null, null, null, null);\r
+ "Abies", null, subtree, null, null, null, true, null, null, null, null);\r
Assert.assertEquals("Expecting 6 entities. Synonym and accepted should not be found, though synonym is published",\r
6, pager.getCount().intValue());\r
\r
EnumSet<TaxaAndNamesSearchMode> searchMode = EnumSet.allOf(TaxaAndNamesSearchMode.class);\r
pager = taxonService.findTaxaAndNamesByFullText(\r
- searchMode, "Abies", null, null, null, null, true, null, null, null, null);\r
+ searchMode, "Abies", null, subtree, null, null, null, true, null, null, null, null);\r
// logPagerRecords(pager, Level.DEBUG);\r
Assert.assertEquals("all search modes", 8, pager.getCount().intValue());\r
searchMode.remove(TaxaAndNamesSearchMode.includeUnpublished);\r
pager = taxonService.findTaxaAndNamesByFullText(\r
- searchMode, "Abies", null, null, null, null, true, null, null, null, null);\r
+ searchMode, "Abies", null, subtree, null, null, null, true, null, null, null, null);\r
Assert.assertEquals("all search modes except unpublished", 6, pager.getCount().intValue());\r
\r
pager = taxonService.findTaxaAndNamesByFullText(EnumSet.allOf(TaxaAndNamesSearchMode.class),\r
- "Abies", alternateClassification, null, null, null, true, null, null, null, null);\r
+ "Abies", alternateClassification, subtree, null, null, null, true, null, null, null, null);\r
// logPagerRecords(pager, Level.DEBUG);\r
Assert.assertEquals("all search modes, filtered by alternateClassification", 1, pager.getCount().intValue());\r
\r
pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished),\r
- "Abies", null, null, null, null, true, null, null, null, null);\r
+ "Abies", null, subtree, null, null, null, true, null, null, null, null);\r
Assert.assertEquals("Expecting 2 entity", 2, pager.getCount().intValue());\r
Set<UUID> uuids = getTaxonUuidSet(pager);\r
Assert.assertTrue("The real synonym should be contained", uuids.contains(ABIES_SUBALPINA_UUID));\r
Assert.assertTrue("The pro parte synonym should be contained",uuids.contains(ABIES_LASIOCARPA_UUID));\r
//without published\r
pager = taxonService.findTaxaAndNamesByFullText(EnumSet.of(TaxaAndNamesSearchMode.doSynonyms),\r
- "Abies", null, null, null, null, true, null, null, null, null);\r
+ "Abies", null, subtree, null, null, null, true, null, null, null, null);\r
Assert.assertEquals("Expecting 0 entities", 0, pager.getCount().intValue());\r
\r
\r
pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doTaxaByCommonNames, TaxaAndNamesSearchMode.includeUnpublished),\r
- "Abies", null, null, null, null, true, null, null, null, null);\r
+ "Abies", null, subtree, null, null, null, true, null, null, null, null);\r
Assert.assertEquals("Expecting 0 entity", 0, pager.getCount().intValue());\r
\r
pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doTaxaByCommonNames, TaxaAndNamesSearchMode.includeUnpublished),\r
- "Tanne", null, null, null, null, true, null, null, null, null);\r
+ "Tanne", null, subtree, null, null, null, true, null, null, null, null);\r
Assert.assertEquals("Expecting 1 entity", 1, pager.getRecords().size());\r
Assert.assertEquals("Expecting 1 entity", 1, pager.getCount().intValue());\r
pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doTaxaByCommonNames),\r
- "Tanne", null, null, null, null, true, null, null, null, null);\r
+ "Tanne", null, subtree, null, null, null, true, null, null, null, null);\r
Assert.assertEquals("Expecting 0 entity", 0, pager.getRecords().size());\r
\r
//misapplied names\r
pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doMisappliedNames, TaxaAndNamesSearchMode.includeUnpublished),\r
- "kawakamii", (Classification)null, null, null, null, true, null, null, null, null);\r
+ "kawakamii", (Classification)null, subtree, null, null, null, true, null, null, null, null);\r
logFreeTextSearchResults(pager, Level.DEBUG, null);\r
Assert.assertEquals("Expecting 1 entity", 1, pager.getCount().intValue());\r
//unpublish accepted taxon\r
pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doMisappliedNames),\r
- "kawakamii", (Classification)null, null, null, null, true, null, null, null, null);\r
+ "kawakamii", (Classification)null, subtree, null, null, null, true, null, null, null, null);\r
Assert.assertEquals("Expecting 0 entities", 0, pager.getCount().intValue());\r
//published accepted taxon/misapplied name\r
Taxon abiesBalsamea = (Taxon)taxonService.find(ABIES_BALSAMEA_UUID);\r
commitAndStartNewTransaction();\r
pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doMisappliedNames),\r
- "kawakamii", (Classification)null, null, null, null, true, null, null, null, null);\r
+ "kawakamii", (Classification)null, subtree, null, null, null, true, null, null, null, null);\r
Assert.assertEquals("Expecting 1 entities", 1, pager.getCount().intValue());\r
//unpublished misapplied name\r
- Taxon misapplied = (Taxon)taxonService.find(D_ABIES_KAWAKAMII_SEC_KOMAROV_UUID);\r
+ Taxon misapplied = (Taxon)taxonService.find(DESC_ABIES_KAWAKAMII_SEC_KOMAROV_UUID);\r
misapplied.setPublish(false);\r
commitAndStartNewTransaction();\r
pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doMisappliedNames),\r
- "kawakamii", (Classification)null, null, null, null, true, null, null, null, null);\r
+ "kawakamii", (Classification)null, subtree, null, null, null, true, null, null, null, null);\r
Assert.assertEquals("Expecting 0 entities", 0, pager.getCount().intValue());\r
\r
}\r
\r
+ /**\r
+  * Tests wildcard full-text search on taxa and synonyms (published only):\r
+  * prefix ("Abi*"), suffix ("*bies"), single-character ("?bies") and\r
+  * match-all ("*") queries.\r
+  */\r
+ @Test\r
+ @DataSet\r
+ public final void testFindTaxaAndNamesByFullText_wildcard() throws IOException, LuceneParseException, LuceneMultiSearchException {\r
+\r
+ refreshLuceneIndex();\r
+ TaxonNode subtree = null;\r
+\r
+ Pager<SearchResult<TaxonBase>> pager;\r
+ pager = taxonService.findTaxaAndNamesByFullText(TaxaAndNamesSearchMode.taxaAndSynonyms(),\r
+ "Abi*", null, subtree, null, null, null, true, null, null, null, null);\r
+// logFreeTextSearchResults(pager, Level.DEBUG, null);\r
+ Assert.assertEquals("doTaxa & doSynonyms published only", 6, pager.getCount().intValue());\r
+ pager = taxonService.findTaxaAndNamesByFullText(TaxaAndNamesSearchMode.taxaAndSynonyms(),\r
+ "*bies", null, subtree, null, null, null, true, null, null, null, null);\r
+ Assert.assertEquals("doTaxa & doSynonyms, published only", 6, pager.getCount().intValue());\r
+ // logFreeTextSearchResults(pager, Level.ERROR, null);\r
+ pager = taxonService.findTaxaAndNamesByFullText(TaxaAndNamesSearchMode.taxaAndSynonyms(),\r
+ "?bies", null, subtree, null, null, null, true, null, null, null, null);\r
+ Assert.assertEquals("doTaxa & doSynonyms, published only", 6, pager.getCount().intValue());\r
+ // logFreeTextSearchResults(pager, Level.ERROR, null);\r
+ pager = taxonService.findTaxaAndNamesByFullText(TaxaAndNamesSearchMode.taxaAndSynonyms(),\r
+ "*", null, subtree, null, null, null, true, null, null, null, null);\r
+ // NOTE(review): match-all expects 7 while the other wildcard queries expect 6 --\r
+ // presumably "*" also matches a record the name-based wildcards miss; verify intent\r
+ Assert.assertEquals("doTaxa & doSynonyms, published only", 7, pager.getCount().intValue());\r
+ }\r
+\r
+ /**\r
+  * Tests behavior for empty ("") and {@code null} query strings -- judging by the\r
+  * expected counts these are intended to match all documents of the selected\r
+  * search modes (published only).\r
+  */\r
+ @Test\r
+ @DataSet\r
+ // @Ignore // FIXME: fails due org.apache.lucene.queryparser.classic.ParseException: Cannot parse 'relatedFrom.titleCache:()': Encountered " ")" ") "" at line 1, column 24.\r
+ public final void testFindTaxaAndNamesByFullText_empty_querString() throws IOException, LuceneParseException, LuceneMultiSearchException {\r
+\r
+ refreshLuceneIndex();\r
+ TaxonNode subtree = null;\r
+\r
+ Pager<SearchResult<TaxonBase>> pager;\r
+ pager = taxonService.findTaxaAndNamesByFullText(EnumSet.of(TaxaAndNamesSearchMode.doTaxa),\r
+ "", null, subtree, null, null, null, true, null, null, null, null);\r
+ Assert.assertEquals("doTaxa, published only", 7, pager.getCount().intValue());\r
+\r
+ pager = taxonService.findTaxaAndNamesByFullText(TaxaAndNamesSearchMode.taxaAndSynonyms(),\r
+ "", null, subtree, null, null, null, true, null, null, null, null);\r
+ Assert.assertEquals("doTaxa & doSynonyms, published only", 7, pager.getCount().intValue());\r
+\r
+ pager = taxonService.findTaxaAndNamesByFullText(EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doMisappliedNames),\r
+ null, null, subtree, null, null, null, true, null, null, null, null);\r
+ Assert.assertEquals("doTaxa & doMisappliedNames published only", 7, pager.getCount().intValue());\r
+\r
+ pager = taxonService.findTaxaAndNamesByFullText(EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doMisappliedNames, TaxaAndNamesSearchMode.doTaxaByCommonNames),\r
+ null, null, subtree, null, null, null, true, null, null, null, null);\r
+ Assert.assertEquals("doTaxa & doMisappliedNames & doTaxaByCommonNames , published only", 7, pager.getCount().intValue());\r
+ // logFreeTextSearchResults(pager, Level.ERROR, null);\r
+ }\r
+\r
@Test\r
@DataSet\r
//test for https://dev.e-taxonomy.eu/redmine/issues/7486\r
public final void testFindTaxaAndNamesByFullText_synonymsAndMisapplied_7486() throws IOException, LuceneParseException, LuceneMultiSearchException {\r
\r
refreshLuceneIndex();\r
+ TaxonNode subtree = null;\r
\r
//misapplied names\r
Pager<SearchResult<TaxonBase>> pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.doMisappliedNames, TaxaAndNamesSearchMode.includeUnpublished),\r
- "Abies", (Classification)null, null, null, null, true, null, null, null, null);\r
+ "Abies", (Classification)null, subtree, null, null, null, true, null, null, null, null);\r
logFreeTextSearchResults(pager, Level.DEBUG, null);\r
Assert.assertEquals("Expecting 3 entity", 3, pager.getCount().intValue());\r
Set<UUID> uuids = getTaxonUuidSet(pager);\r
Assert.assertTrue("The real synonym should be contained", uuids.contains(ABIES_SUBALPINA_UUID));\r
Assert.assertTrue("The pro parte synonym should be contained",uuids.contains(ABIES_LASIOCARPA_UUID));\r
- Assert.assertTrue("The misapplied name should be contained",uuids.contains(D_ABIES_KAWAKAMII_SEC_KOMAROV_UUID));\r
+ Assert.assertTrue("The misapplied name should be contained",uuids.contains(DESC_ABIES_KAWAKAMII_SEC_KOMAROV_UUID));\r
}\r
\r
@Test\r
public final void testFindTaxaAndNamesByFullText_PhraseQuery() throws IOException, LuceneParseException, LuceneMultiSearchException {\r
\r
refreshLuceneIndex();\r
+ TaxonNode subtree = null;\r
\r
Pager<SearchResult<TaxonBase>> pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished),\r
- "\"Abies alba\"", null, null, null, null, true, null, null, null, null);\r
+ "\"Abies alba\"", null, subtree, null, null, null, true, null, null, null, null);\r
// logPagerRecords(pager, Level.DEBUG);\r
Assert.assertEquals("doTaxa & doSynonyms with simple phrase query", 1, pager.getCount().intValue());\r
\r
pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished),\r
- "\"Abies al*\"", null, null, null, null, true, null, null, null, null);\r
+ "\"Abies al*\"", null, subtree, null, null, null, true, null, null, null, null);\r
// logPagerRecords(pager, Level.DEBUG);\r
Assert.assertEquals("doTaxa & doSynonyms with complex phrase query", 1, pager.getCount().intValue());\r
\r
pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished),\r
- "\"Abies*\"", null, null, null, null, true, null, null, null, null);\r
+ "\"Abies*\"", null, subtree, null, null, null, true, null, null, null, null);\r
+// logPagerRecords(pager, Level.DEBUG);\r
+ Assert.assertEquals("doTaxa & doSynonyms with simple phrase query", 8, pager.getCount().intValue());\r
+\r
+ pager = taxonService.findTaxaAndNamesByFullText(\r
+ EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished),\r
+ "\"Abies*\"", null, subtree, null, null, null, true, null, null, null, null);\r
// logPagerRecords(pager, Level.DEBUG);\r
Assert.assertEquals("doTaxa & doSynonyms with simple phrase query", 8, pager.getCount().intValue());\r
\r
public final void testFindTaxaAndNamesByFullText_Sort() throws IOException, LuceneParseException, LuceneMultiSearchException {\r
\r
refreshLuceneIndex();\r
+ TaxonNode subtree = null;\r
\r
List<OrderHint> orderHints = new ArrayList<>();\r
\r
orderHints.addAll(OrderHint.ORDER_BY_ID.asList());\r
Pager<SearchResult<TaxonBase>> pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doTaxa),\r
- "Abies", null, null, null, null, true, null, null, orderHints, null);\r
+ "Abies", null, subtree, null, null, null, true, null, null, orderHints, null);\r
// logSearchResults(pager, Level.DEBUG, docFields2log);\r
int lastId = -1;\r
for(SearchResult<TaxonBase> rs : pager.getRecords()){\r
orderHints.addAll(OrderHint.ORDER_BY_ID.asList());\r
pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doSynonyms),\r
- "Abies", null, null, null, null, true, null, null, orderHints, null);\r
+ "Abies", null, subtree, null, null, null, true, null, null, orderHints, null);\r
// logSearchResults(pager, Level.DEBUG, docFields2log);\r
\r
lastId = -1;\r
orderHints.addAll(OrderHint.NOMENCLATURAL_SORT_ORDER.asList());\r
pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doSynonyms),\r
- "Abies", null, null, null, null, true, null, null, orderHints, null);\r
+ "Abies", null, subtree, null, null, null, true, null, null, orderHints, null);\r
logFreeTextSearchResults(pager, Level.DEBUG, null);\r
\r
}\r
@Ignore //ignore until #7487 is fixed\r
public final void testFindTaxaAndNamesByFullText_AreaFilter_7487() throws IOException, LuceneParseException, LuceneMultiSearchException {\r
refreshLuceneIndex();\r
+ TaxonNode subtree = null;\r
Set<NamedArea> a_germany_canada_russia = new HashSet<>();\r
a_germany_canada_russia.add(germany);\r
a_germany_canada_russia.add(canada);\r
\r
Pager<SearchResult<TaxonBase>> pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished),\r
- "Abies", null, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
+ "Abies", null, subtree, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
Assert.assertEquals("Synonyms with matching area filter", 2, pager.getCount().intValue());\r
Set<UUID> uuids = this.getTaxonUuidSet(pager);\r
Assert.assertTrue("Synonym of balsamea should be in", uuids.contains(ABIES_SUBALPINA_UUID));\r
//should give same results as above\r
pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished),\r
- "Abies", null, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
+ "Abies", null, subtree, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
// Assert.assertEquals("Synonyms with matching area filter", 2, pager.getCount().intValue());\r
// uuids = this.getTaxonUuidSet(pager);\r
// Assert.assertTrue("Synonym of balsamea should be in", uuids.contains(ABIES_SUBALPINA_UUID));\r
///MISAPPLIED\r
pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doMisappliedNames, TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.includeUnpublished),\r
- "Abies", null, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
+ "Abies", null, subtree, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
Assert.assertEquals("misappliedNames with matching area & status filter", 3, pager.getCount().intValue());\r
uuids = this.getTaxonUuidSet(pager);\r
- Assert.assertTrue("Misapplied name should be in", uuids.contains(D_ABIES_KAWAKAMII_SEC_KOMAROV_UUID));\r
+ Assert.assertTrue("Misapplied name should be in", uuids.contains(DESC_ABIES_KAWAKAMII_SEC_KOMAROV_UUID));\r
\r
t_abies_balsamea = (Taxon)taxonService.find(ABIES_BALSAMEA_UUID);\r
relsTo = t_abies_balsamea.getMisappliedNameRelations();\r
Assert.assertEquals(1, relsTo.size());\r
taxonRelation = relsTo.iterator().next();\r
- Assert.assertEquals(taxonRelation.getFromTaxon().getUuid(), D_ABIES_KAWAKAMII_SEC_KOMAROV_UUID);\r
+ Assert.assertEquals(taxonRelation.getFromTaxon().getUuid(), DESC_ABIES_KAWAKAMII_SEC_KOMAROV_UUID);\r
taxonRelation.setType(TaxonRelationshipType.PRO_PARTE_MISAPPLIED_NAME_FOR());\r
taxonService.saveOrUpdate(t_abies_balsamea);\r
commitAndStartNewTransaction(null);\r
//strange it works here before fixing #7487 already\r
pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doMisappliedNames, TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.includeUnpublished),\r
- "Abies", null, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
+ "Abies", null, subtree, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
Assert.assertEquals("misappliedNames with matching area & status filter", 3, pager.getCount().intValue());\r
uuids = this.getTaxonUuidSet(pager);\r
- Assert.assertTrue("Pro parte misapplied name should be in", uuids.contains(D_ABIES_KAWAKAMII_SEC_KOMAROV_UUID));\r
+ Assert.assertTrue("Pro parte misapplied name should be in", uuids.contains(DESC_ABIES_KAWAKAMII_SEC_KOMAROV_UUID));\r
\r
}\r
\r
public final void testFindTaxaAndNamesByFullText_AreaFilter() throws IOException, LuceneParseException, LuceneMultiSearchException {\r
\r
refreshLuceneIndex();\r
+ TaxonNode subtree = null;\r
\r
Set<NamedArea> a_germany_canada_russia = new HashSet<>();\r
a_germany_canada_russia.add(germany);\r
\r
Pager<SearchResult<TaxonBase>> pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.includeUnpublished),\r
- "Abies", null, a_germany_canada_russia, null, null, true, null, null, null, null);\r
+ "Abies", null, subtree, a_germany_canada_russia, null, null, true, null, null, null, null);\r
logFreeTextSearchResults(pager, Level.DEBUG, null);\r
Assert.assertEquals("Synonyms with matching area filter", 2, pager.getCount().intValue());\r
Set<UUID> uuids = this.getTaxonUuidSet(pager);\r
\r
pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished),\r
- "Abies", null, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
+ "Abies", null, subtree, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
Assert.assertEquals("Synonyms with matching area filter", 2, pager.getCount().intValue());\r
uuids = this.getTaxonUuidSet(pager);\r
Assert.assertTrue("Synonym of balsamea should be in", uuids.contains(ABIES_SUBALPINA_UUID));\r
\r
pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished),\r
- "Abies", null, a_germany_canada_russia, null, null, true, null, null, null, null);\r
+ "Abies", null, subtree, a_germany_canada_russia, null, null, true, null, null, null, null);\r
logFreeTextSearchResults(pager, Level.DEBUG, null);\r
Assert.assertEquals("taxa and synonyms with matching area filter", 4, pager.getCount().intValue());\r
uuids = this.getTaxonUuidSet(pager);\r
Assert.assertTrue("Accepted taxon with area should be in", uuids.contains(ABIES_BALSAMEA_UUID));\r
Assert.assertTrue("Synonym of balsamea should be in", uuids.contains(ABIES_SUBALPINA_UUID));\r
Assert.assertTrue("Pro parte synonym of balsamea should be in", uuids.contains(ABIES_LASIOCARPA_UUID));\r
- Assert.assertFalse("Misapplied name should NOT be in", uuids.contains(D_ABIES_KAWAKAMII_SEC_KOMAROV_UUID));\r
+ Assert.assertFalse("Misapplied name should NOT be in", uuids.contains(DESC_ABIES_KAWAKAMII_SEC_KOMAROV_UUID));\r
\r
pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished),\r
- "Abies", null, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
+ "Abies", null, subtree, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
Assert.assertEquals("taxa and synonyms with matching area & status filter 4", 4, pager.getCount().intValue());\r
uuids = this.getTaxonUuidSet(pager);\r
Assert.assertTrue("Synonym of balsamea should be in", uuids.contains(ABIES_SUBALPINA_UUID));\r
\r
pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished),\r
- "Abies", null, a_germany_canada_russia, present, null, true, null, null, null, null);\r
+ "Abies", null, subtree, a_germany_canada_russia, present, null, true, null, null, null, null);\r
Assert.assertEquals("taxa and synonyms with matching area & status filter 3", 3, pager.getCount().intValue());\r
uuids = this.getTaxonUuidSet(pager);\r
Assert.assertTrue("Abies balsamea (accepted taxon) should be in", uuids.contains(ABIES_BALSAMEA_UUID));\r
\r
pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doTaxa, TaxaAndNamesSearchMode.doSynonyms, TaxaAndNamesSearchMode.includeUnpublished),\r
- "Abies", null, a_russia, present, null, true, null, null, null, null);\r
+ "Abies", null, subtree, a_russia, present, null, true, null, null, null, null);\r
Assert.assertEquals("taxa and synonyms with non matching area & status filter", 0, pager.getCount().intValue());\r
\r
pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doTaxaByCommonNames, TaxaAndNamesSearchMode.includeUnpublished),\r
- "Tanne", null, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
+ "Tanne", null, subtree, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
Assert.assertEquals("ByCommonNames with area filter", 1, pager.getCount().intValue());\r
uuids = this.getTaxonUuidSet(pager);\r
Assert.assertTrue("Abies balsamea should be in", uuids.contains(ABIES_BALSAMEA_UUID));\r
// abies_kawakamii_sensu_komarov as misapplied name for t_abies_balsamea\r
pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doMisappliedNames, TaxaAndNamesSearchMode.includeUnpublished),\r
- "Abies", null, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
+ "Abies", null, subtree, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
Assert.assertEquals("misappliedNames with matching area & status filter", 1, pager.getCount().intValue());\r
uuids = this.getTaxonUuidSet(pager);\r
- Assert.assertTrue("Misapplied name should be in", uuids.contains(D_ABIES_KAWAKAMII_SEC_KOMAROV_UUID));\r
+ Assert.assertTrue("Misapplied name should be in", uuids.contains(DESC_ABIES_KAWAKAMII_SEC_KOMAROV_UUID));\r
\r
\r
// 1. remove existing taxon relation\r
\r
pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doMisappliedNames, TaxaAndNamesSearchMode.includeUnpublished),\r
- "Abies", null, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
+ "Abies", null, subtree, a_germany_canada_russia, present_native, null, true, null, null, null, null);\r
Assert.assertEquals("misappliedNames with matching area & status filter, should match nothing now", 0, pager.getCount().intValue());\r
\r
// 2. now add abies_kawakamii_sensu_komarov as misapplied name for t_abies_alba and search for misapplications in Russia: ABSENT\r
- Taxon t_abies_kawakamii_sensu_komarov = (Taxon)taxonService.find(D_ABIES_KAWAKAMII_SEC_KOMAROV_UUID);\r
+ Taxon t_abies_kawakamii_sensu_komarov = (Taxon)taxonService.find(DESC_ABIES_KAWAKAMII_SEC_KOMAROV_UUID);\r
Taxon t_abies_alba = (Taxon)taxonService.find(ABIES_ALBA_UUID);\r
t_abies_alba.addMisappliedName(t_abies_kawakamii_sensu_komarov, null, null);\r
taxonService.update(t_abies_kawakamii_sensu_komarov);\r
\r
pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doMisappliedNames, TaxaAndNamesSearchMode.includeUnpublished),\r
- "Abies", null, a_germany_canada_russia, absent, null, true, null, null, null, null);\r
+ "Abies", null, subtree, a_germany_canada_russia, absent, null, true, null, null, null, null);\r
Assert.assertEquals("misappliedNames with matching area & status filter, should find one", 1, pager.getCount().intValue());\r
uuids = this.getTaxonUuidSet(pager);\r
- Assert.assertTrue("Misapplied name should be in", uuids.contains(D_ABIES_KAWAKAMII_SEC_KOMAROV_UUID));\r
+ Assert.assertTrue("Misapplied name should be in", uuids.contains(DESC_ABIES_KAWAKAMII_SEC_KOMAROV_UUID));\r
\r
}\r
\r
//http://dev.e-taxonomy.eu/trac/ticket/5477\r
public final void testFindTaxaAndNamesByFullText_AreaFilter_issue5477() throws IOException, LuceneParseException, LuceneMultiSearchException {\r
\r
+ TaxonNode subtree = null;\r
Set<NamedArea> a_germany_canada_russia = new HashSet<>();\r
a_germany_canada_russia.add(germany);\r
a_germany_canada_russia.add(canada);\r
Set<PresenceAbsenceTerm> absent = new HashSet<>();\r
absent.add(PresenceAbsenceTerm.ABSENT());\r
\r
- Taxon t_abies_kawakamii_sensu_komarov = (Taxon)taxonService.find(D_ABIES_KAWAKAMII_SEC_KOMAROV_UUID);\r
+ Taxon t_abies_kawakamii_sensu_komarov = (Taxon)taxonService.find(DESC_ABIES_KAWAKAMII_SEC_KOMAROV_UUID);\r
Taxon t_abies_alba = (Taxon)taxonService.find(ABIES_ALBA_UUID);\r
t_abies_alba.addMisappliedName(t_abies_kawakamii_sensu_komarov, null, null);\r
\r
\r
Pager<SearchResult<TaxonBase>> pager = taxonService.findTaxaAndNamesByFullText(\r
EnumSet.of(TaxaAndNamesSearchMode.doMisappliedNames),\r
- "Abies", null, a_germany_canada_russia, absent, null, true, null, null, null, null);\r
+ "Abies", null, subtree, a_germany_canada_russia, absent, null, true, null, null, null, null);\r
Assert.assertEquals("misappliedNames with matching area & status filter, should find one", 1, pager.getCount().intValue());\r
}\r
\r
public final void testFindByEverythingFullText() throws IOException, LuceneParseException, LuceneMultiSearchException {\r
\r
refreshLuceneIndex();\r
-\r
+ TaxonNode subtree = null;\r
+ EnumSet<TaxaAndNamesSearchMode> mode = TaxaAndNamesSearchMode.taxaAndSynonymsWithUnpublished();\r
// via Taxon\r
- Pager<SearchResult<TaxonBase>>pager = taxonService.findByEverythingFullText("Abies", null, includeUnpublished, null, true, null, null, null, null);\r
+ Pager<SearchResult<TaxonBase>>pager = taxonService.findByEverythingFullText("Abies", null, subtree, includeUnpublished, null, true, null, null, null, null);\r
+// Pager<SearchResult<TaxonBase>> pager = taxonService.findTaxaAndNamesByFullText(mode,\r
+// "Abies", null, null, null, null, true, null, null, null, null);\r
logFreeTextSearchResults(pager, Level.DEBUG, null);\r
Assert.assertTrue("Expecting at least 7 entities for 'Abies'", pager.getCount() > 7);\r
Assert.assertNotNull("Expecting entity", pager.getRecords().get(0).getEntity());\r
- Assert.assertEquals("Expecting Taxon entity", Taxon.class, pager.getRecords().get(0).getEntity().getClass());\r
+// Assert.assertEquals("Expecting Taxon entity", Taxon.class, pager.getRecords().get(0).getEntity().getClass());\r
\r
// via DescriptionElement\r
- pager = taxonService.findByEverythingFullText("present", null, includeUnpublished, null, true, null, null, null, null);\r
+ pager = taxonService.findByEverythingFullText("present", null, subtree, includeUnpublished, null, true, null, null, null, null);\r
+ //this is not covered by findTaxaAndNamesByFullText\r
+// pager = taxonService.findTaxaAndNamesByFullText(mode,\r
+// "present", null, null, null, null, true, null, null, null, null);\r
Assert.assertEquals("Expecting one entity when searching for area 'present'", 1, pager.getCount().intValue());\r
Assert.assertNotNull("Expecting entity", pager.getRecords().get(0).getEntity());\r
Assert.assertEquals("Expecting Taxon entity", Taxon.class, CdmBase.deproxy(pager.getRecords().get(0).getEntity()).getClass());\r
public final void findByEveryThingFullText() throws IOException, LuceneParseException, LuceneMultiSearchException {\r
\r
refreshLuceneIndex();\r
+ TaxonNode subtree = null;\r
\r
- Pager<SearchResult<TaxonBase>> pager = taxonService.findByEverythingFullText("genus", null, includeUnpublished, null, false, null, null, null, null); // --> 1\r
+ Classification classification = null;\r
+ EnumSet<TaxaAndNamesSearchMode> mode = TaxaAndNamesSearchMode.taxaAndSynonymsWithUnpublished();\r
+\r
+ Pager<SearchResult<TaxonBase>> pager = taxonService.findByEverythingFullText("genus", null, subtree, includeUnpublished, null, false, null, null, null, null); // --> 1\r
+// Pager<SearchResult<TaxonBase>> pager = taxonService.findTaxaAndNamesByFullText(mode,\r
+// "genus", classification, null, null, null, false, null, null, null, null);\r
Assert.assertEquals("Expecting 1 entity", 1, pager.getCount().intValue());\r
\r
//FIXME FAILS: abies balamea is returned twice, see also testFullText_Grouping()\r
- pager = taxonService.findByEverythingFullText("Balsam", null, includeUnpublished, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
+ pager = taxonService.findByEverythingFullText("Balsam", null, subtree, includeUnpublished, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
logFreeTextSearchResults(pager, Level.DEBUG, null);\r
+// pager = taxonService.findTaxaAndNamesByFullText(EnumSet.allOf(TaxaAndNamesSearchMode.class),\r
+// "Balsam", classification, null, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
Assert.assertEquals("expecting to find the Abies balsamea via the GERMAN DescriptionElements", 1, pager.getCount().intValue());\r
\r
- pager = taxonService.findByEverythingFullText("Abies", null, includeUnpublished, null, true, null, null, null, null);\r
+ //TODO fieldHighlight does not yet work\r
+ pager = taxonService.findByEverythingFullText("Abies", null, subtree, includeUnpublished, null, true, null, null, null, null);\r
+// pager = taxonService.findTaxaAndNamesByFullText(mode,\r
+// "Abies", classification, null, null, Arrays.asList(new Language[]{Language.GERMAN()}), false, null, null, null, null);\r
Assert.assertEquals("Expecting 8 entities", 8, pager.getCount().intValue());\r
SearchResult<TaxonBase> searchResult = pager.getRecords().get(0);\r
Assert.assertTrue("the map of highlighted fragments should contain at least one item", searchResult.getFieldHighlightMap().size() > 0);\r
@Test\r
@DataSet\r
public final void benchmarkFindByCommonNameLucene() throws IOException, LuceneParseException {\r
-\r
+ TaxonNode subtree = null;\r
createRandomTaxonWithCommonName(NUM_OF_NEW_RADOM_ENTITIES);\r
\r
refreshLuceneIndex();\r
\r
long startMillis = System.currentTimeMillis();\r
for (int indx = 0; indx < BENCHMARK_ROUNDS; indx++) {\r
- pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Wei"+UTF8.SHARP_S+"*", null, null, null, false, null, null, null, null);\r
+ pager = taxonService.findByDescriptionElementFullText(CommonTaxonName.class, "Wei"+UTF8.SHARP_S+"*", null, subtree, null, null, false, null, null, null, null);\r
if (logger.isDebugEnabled()) {\r
logger.debug("[" + indx + "]" + pager.getRecords().get(0).getEntity().getTitleCache());\r
}\r
TaxonDescription d_abies_alba = TaxonDescription.NewInstance(t_abies_alba);\r
TaxonDescription d_abies_balsamea = TaxonDescription.NewInstance(t_abies_balsamea);\r
\r
- d_abies_alba.setUuid(D_ABIES_ALBA_UUID);\r
- d_abies_balsamea.setUuid(D_ABIES_BALSAMEA_UUID);\r
+ d_abies_alba.setUuid(DESC_ABIES_ALBA_UUID);\r
+ d_abies_balsamea.setUuid(DESC_ABIES_BALSAMEA_UUID);\r
\r
\r
// CommonTaxonName\r
--- /dev/null
+/**
+* Copyright (C) 2018 EDIT
+* European Distributed Institute of Taxonomy
+* http://www.e-taxonomy.eu
+*
+* The contents of this file are subject to the Mozilla Public License Version 1.1
+* See LICENSE.TXT at the top of this package for the full license terms.
+*/
+package eu.etaxonomy.cdm.api.service;
+
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+import java.io.FileNotFoundException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.log4j.Logger;
+import org.junit.Before;
+import org.junit.Test;
+
+import eu.etaxonomy.cdm.api.service.exception.RegistrationValidationException;
+import eu.etaxonomy.cdm.api.service.name.TypeDesignationSetManager;
+import eu.etaxonomy.cdm.api.service.name.TypeDesignationSetManager.TypeDesignationWorkingSet;
+import eu.etaxonomy.cdm.model.common.IdentifiableSource;
+import eu.etaxonomy.cdm.model.media.Media;
+import eu.etaxonomy.cdm.model.name.NameTypeDesignation;
+import eu.etaxonomy.cdm.model.name.Rank;
+import eu.etaxonomy.cdm.model.name.SpecimenTypeDesignation;
+import eu.etaxonomy.cdm.model.name.SpecimenTypeDesignationStatus;
+import eu.etaxonomy.cdm.model.name.TaxonName;
+import eu.etaxonomy.cdm.model.name.TaxonNameFactory;
+import eu.etaxonomy.cdm.model.name.TypeDesignationBase;
+import eu.etaxonomy.cdm.model.name.TypeDesignationStatusBase;
+import eu.etaxonomy.cdm.model.occurrence.DerivationEvent;
+import eu.etaxonomy.cdm.model.occurrence.DerivedUnit;
+import eu.etaxonomy.cdm.model.occurrence.FieldUnit;
+import eu.etaxonomy.cdm.model.occurrence.MediaSpecimen;
+import eu.etaxonomy.cdm.model.occurrence.SpecimenOrObservationType;
+import eu.etaxonomy.cdm.model.reference.Reference;
+import eu.etaxonomy.cdm.model.reference.ReferenceFactory;
+import eu.etaxonomy.cdm.ref.TypedEntityReference;
+import eu.etaxonomy.cdm.test.integration.CdmTransactionalIntegrationTest;
+/**
+ * @author a.kohlbecker, k.luther
+ * @since 03.09.2018
+ *
+ */
+public class TypeDesignationSetManagerTest extends CdmTransactionalIntegrationTest{
+
+ private NameTypeDesignation ntd;
+ private SpecimenTypeDesignation std_IT;
+ private SpecimenTypeDesignation std_HT;
+ private SpecimenTypeDesignation std_IT_2;
+ private SpecimenTypeDesignation std_IT_3;
+ private SpecimenTypeDesignation mtd_HT_published;
+ private SpecimenTypeDesignation mtd_IT_unpublished;
+
+ @Before
+ public void init(){
+
+ ntd = NameTypeDesignation.NewInstance();
+ ntd.setId(1);
+ TaxonName typeName = TaxonNameFactory.NewBacterialInstance(Rank.SPECIES());
+ typeName.setTitleCache("Prionus L.", true);
+ ntd.setTypeName(typeName);
+ Reference citation = ReferenceFactory.newGeneric();
+ citation.setTitleCache("Species Plantarum", true);
+ ntd.setCitation(citation);
+
+ FieldUnit fu_1 = FieldUnit.NewInstance();
+ fu_1.setId(1);
+ fu_1.setTitleCache("Testland, near Bughausen, A.Kohlbecker 81989, 2017", true);
+
+ FieldUnit fu_2 = FieldUnit.NewInstance();
+ fu_2.setId(2);
+ fu_2.setTitleCache("Dreamland, near Kissingen, A.Kohlbecker 66211, 2017", true);
+
+ std_HT = SpecimenTypeDesignation.NewInstance();
+ std_HT.setId(1);
+ DerivedUnit specimen_HT = DerivedUnit.NewInstance(SpecimenOrObservationType.PreservedSpecimen);
+ specimen_HT.setTitleCache("OHA", true);
+ createDerivationEvent(fu_1, specimen_HT);
+ specimen_HT.getOriginals().add(fu_1);
+ std_HT.setTypeSpecimen(specimen_HT);
+ std_HT.setTypeStatus(SpecimenTypeDesignationStatus.HOLOTYPE());
+
+ std_IT = SpecimenTypeDesignation.NewInstance();
+ std_IT.setId(2);
+ DerivedUnit specimen_IT = DerivedUnit.NewInstance(SpecimenOrObservationType.PreservedSpecimen);
+ specimen_IT.setTitleCache("BER", true);
+ createDerivationEvent(fu_1, specimen_IT);
+ std_IT.setTypeSpecimen(specimen_IT);
+ std_IT.setTypeStatus(SpecimenTypeDesignationStatus.ISOTYPE());
+
+ std_IT_2 = SpecimenTypeDesignation.NewInstance();
+ std_IT_2.setId(3);
+ DerivedUnit specimen_IT_2 = DerivedUnit.NewInstance(SpecimenOrObservationType.PreservedSpecimen);
+ specimen_IT_2.setTitleCache("KEW", true);
+ createDerivationEvent(fu_1, specimen_IT_2);
+ std_IT_2.setTypeSpecimen(specimen_IT_2);
+ std_IT_2.setTypeStatus(SpecimenTypeDesignationStatus.ISOTYPE());
+
+ std_IT_3 = SpecimenTypeDesignation.NewInstance();
+ std_IT_3.setId(4);
+ DerivedUnit specimen_IT_3 = DerivedUnit.NewInstance(SpecimenOrObservationType.PreservedSpecimen);
+ specimen_IT_3.setTitleCache("M", true);
+ createDerivationEvent(fu_2, specimen_IT_3);
+ std_IT_3.setTypeSpecimen(specimen_IT_3);
+ std_IT_3.setTypeStatus(SpecimenTypeDesignationStatus.ISOTYPE());
+
+ mtd_HT_published = SpecimenTypeDesignation.NewInstance();
+ mtd_HT_published.setId(5);
+ MediaSpecimen mediaSpecimen_published = (MediaSpecimen)DerivedUnit.NewInstance(SpecimenOrObservationType.Media);
+ Media media = Media.NewInstance();
+ Reference ref = ReferenceFactory.newGeneric();
+ ref.setTitleCache("A.K. & W.K (2008) Algae of the BGBM", true);
+ media.addSource(IdentifiableSource.NewPrimaryMediaSourceInstance(ref, "p.33"));
+ mediaSpecimen_published.setMediaSpecimen(media);
+ createDerivationEvent(fu_1, mediaSpecimen_published);
+ mtd_HT_published.setTypeSpecimen(mediaSpecimen_published);
+ mtd_HT_published.setTypeStatus(SpecimenTypeDesignationStatus.HOLOTYPE());
+
+ mtd_IT_unpublished = SpecimenTypeDesignation.NewInstance();
+ mtd_IT_unpublished.setId(6);
+ MediaSpecimen mediaSpecimen_unpublished = (MediaSpecimen)DerivedUnit.NewInstance(SpecimenOrObservationType.Media);
+ eu.etaxonomy.cdm.model.occurrence.Collection collection = eu.etaxonomy.cdm.model.occurrence.Collection.NewInstance();
+ collection.setCode("B");
+ mediaSpecimen_unpublished.setCollection(collection);
+ mediaSpecimen_unpublished.setAccessionNumber("Slide A565656");
+ createDerivationEvent(fu_1, mediaSpecimen_unpublished);
+ mtd_IT_unpublished.setTypeSpecimen(mediaSpecimen_unpublished);
+ mtd_IT_unpublished.setTypeStatus(SpecimenTypeDesignationStatus.ISOTYPE());
+
+ }
+
+    /**
+     * Creates a derivation event linking the given field unit (as original)
+     * to the given derived unit (as derivative).
+     */
+ protected void createDerivationEvent(FieldUnit fu_1, DerivedUnit specimen_IT_2) {
+ DerivationEvent derivationEvent_3 = DerivationEvent.NewInstance();
+ derivationEvent_3.addOriginal(fu_1);
+ derivationEvent_3.addDerivative(specimen_IT_2);
+ }
+
+ @Test
+ public void test1() throws RegistrationValidationException{
+
+ List<TypeDesignationBase> tds = new ArrayList<>();
+ tds.add(ntd);
+ tds.add(std_IT);
+ tds.add(std_HT);
+ tds.add(std_IT_2);
+ tds.add(std_IT_3);
+
+ TaxonName typifiedName = TaxonNameFactory.NewBacterialInstance(Rank.SPECIES());
+ typifiedName.setTitleCache("Prionus coriatius L.", true);
+
+ typifiedName.addTypeDesignation(ntd, false);
+ typifiedName.addTypeDesignation(std_HT, false);
+ typifiedName.addTypeDesignation(std_IT, false);
+ typifiedName.addTypeDesignation(std_IT_2, false);
+ typifiedName.addTypeDesignation(std_IT_3, false);
+
+ TypeDesignationSetManager typeDesignationManager = new TypeDesignationSetManager(tds);
+ String result = typeDesignationManager.print();
+
+ Logger.getLogger(this.getClass()).debug(result);
+ assertNotNull(result);
+ assertEquals(
+ "Prionus coriatius L. Type: Dreamland, near Kissingen, A.Kohlbecker 66211, 2017 Isotype, M; Type: Testland, near Bughausen, A.Kohlbecker 81989, 2017 Holotype, OHA; Isotypes: BER, KEW; NameType: Prionus L. Species Plantarum"
+ , result
+ );
+
+ LinkedHashMap<TypedEntityReference, TypeDesignationWorkingSet> orderedTypeDesignations =
+ typeDesignationManager.getOrderdTypeDesignationWorkingSets();
+ Iterator<TypeDesignationWorkingSet> byStatusMapIterator = orderedTypeDesignations.values().iterator();
+ Map<TypeDesignationStatusBase<?>, Collection<TypedEntityReference>> byStatusMap_1 = byStatusMapIterator.next();
+ Map<TypeDesignationStatusBase<?>, Collection<TypedEntityReference>> byStatusMap_2 = byStatusMapIterator.next();
+ Iterator<TypeDesignationStatusBase<?>> keyIt_1 = byStatusMap_1.keySet().iterator();
+ assertEquals("Isotype", keyIt_1.next().getLabel());
+ Iterator<TypeDesignationStatusBase<?>> keyIt_2 = byStatusMap_2.keySet().iterator();
+ assertEquals("Holotype", keyIt_2.next().getLabel());
+ assertEquals("Isotype", keyIt_2.next().getLabel());
+ }
+
+ @Test
+ public void test2() throws RegistrationValidationException{
+
+ TaxonName typifiedName = TaxonNameFactory.NewBacterialInstance(Rank.SPECIES());
+ typifiedName.setTitleCache("Prionus coriatius L.", true);
+
+ TypeDesignationSetManager typeDesignationManager = new TypeDesignationSetManager(typifiedName);
+ String result = typeDesignationManager.print();
+ Logger.getLogger(this.getClass()).debug(result);
+ assertNotNull(result);
+ assertEquals(
+ "Prionus coriatius L."
+ , result
+ );
+
+ typifiedName.addTypeDesignation(ntd, false);
+ typeDesignationManager.addTypeDesigations(null, ntd);
+
+ assertEquals(
+ "Prionus coriatius L. NameType: Prionus L. Species Plantarum"
+ , typeDesignationManager.print()
+ );
+
+ typifiedName.addTypeDesignation(std_HT, false);
+ typeDesignationManager.addTypeDesigations(null, std_HT);
+
+ assertEquals(
+ "Prionus coriatius L. Type: Testland, near Bughausen, A.Kohlbecker 81989, 2017 Holotype, OHA; NameType: Prionus L. Species Plantarum"
+ , typeDesignationManager.print()
+ );
+
+ }
+
+ @Test
+ public void test_mediaType(){
+
+ for(int i = 0; i < 10; i++ ){
+
+ init();
+            // repeat 10 times to ensure the order of type designations is fixed in the representations
+ TaxonName typifiedName = TaxonNameFactory.NewBacterialInstance(Rank.SPECIES());
+ typifiedName.setTitleCache("Prionus coriatius L.", true);
+ typifiedName.addTypeDesignation(mtd_HT_published, false);
+ typifiedName.addTypeDesignation(mtd_IT_unpublished, false);
+
+ TypeDesignationSetManager typeDesignationManager = new TypeDesignationSetManager(typifiedName);
+ typeDesignationManager.addTypeDesigations(null, mtd_HT_published);
+ typeDesignationManager.addTypeDesigations(null, mtd_IT_unpublished);
+
+            assertEquals("failed after repeating " + i + " times",
+ "Prionus coriatius L. Type: Testland, near Bughausen, A.Kohlbecker 81989, 2017 Holotype, [icon] p.33 in A.K. & W.K (2008) Algae of the BGBM; Isotype, [icon] (B Slide A565656)."
+ , typeDesignationManager.print()
+ );
+ }
+
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void createTestDataSet() throws FileNotFoundException {
+ // TODO Auto-generated method stub
+
+ }
+
+
+
+}
import org.junit.BeforeClass;
import org.junit.Test;
-import eu.etaxonomy.cdm.api.service.dto.TaxonRelationshipsDTO.TaxonRelation;
+import eu.etaxonomy.cdm.api.service.dto.TaxonRelationshipsDTO.TaxonRelationDTO;
import eu.etaxonomy.cdm.format.taxon.TaxonRelationshipFormatter;
import eu.etaxonomy.cdm.model.agent.Person;
import eu.etaxonomy.cdm.model.common.DefaultTermInitializer;
dto.addRelation(taxonRel, Direction.relatedFrom, languages);
dto.addRelation(rel2, Direction.relatedFrom, languages);
- TaxonRelation relToDuplicate = dto.addRelation(rel3, Direction.relatedFrom, languages);
+ TaxonRelationDTO relToDuplicate = dto.addRelation(rel3, Direction.relatedFrom, languages);
dto.addRelation(rel4, Direction.relatedFrom, languages);
- TaxonRelation duplicateWithoutRelSec2 = dto.addRelation(rel5, Direction.relatedFrom, languages);
+ TaxonRelationDTO duplicateWithoutRelSec2 = dto.addRelation(rel5, Direction.relatedFrom, languages);
dto.createMisapplicationString();
public void testNewTermQuery_textfield_complex(){
QueryFactory qf = new QueryFactory(luceneIndexToolProvider, Taxon.class);
+ Assert.assertEquals("ComplexPhraseQuery", qf.newTermQuery("titleCache", "\"Lact* *ennis\"", true).getClass().getSimpleName());
Assert.assertEquals("ComplexPhraseQuery", qf.newTermQuery("titleCache", "\"Lactuca per*\"", true).getClass().getSimpleName());
+ Assert.assertEquals("ComplexPhraseQuery", qf.newTermQuery("titleCache", "\"Lact* perennis\"", true).getClass().getSimpleName());
+ Assert.assertEquals("ComplexPhraseQuery", qf.newTermQuery("titleCache", "\"Lact* per*\"", true).getClass().getSimpleName());
+ Assert.assertEquals("ComplexPhraseQuery", qf.newTermQuery("titleCache", "\"Lac*ca per*\"", true).getClass().getSimpleName());
+ Assert.assertEquals("ComplexPhraseQuery", qf.newTermQuery("titleCache", "\"Lac*ca perennis\"", true).getClass().getSimpleName());
+ Assert.assertEquals("ComplexPhraseQuery", qf.newTermQuery("titleCache", "\"Lactuca p*ennis\"", true).getClass().getSimpleName());
+ Assert.assertEquals("ComplexPhraseQuery", qf.newTermQuery("titleCache", "\"*ctuca perennis\"", true).getClass().getSimpleName());
+ Assert.assertEquals("ComplexPhraseQuery", qf.newTermQuery("titleCache", "\"*ctu* perennis\"", true).getClass().getSimpleName());
+ Assert.assertEquals("ComplexPhraseQuery", qf.newTermQuery("titleCache", "\"*ctu* *enn*\"", true).getClass().getSimpleName());
+ Assert.assertEquals("ComplexPhraseQuery", qf.newTermQuery("titleCache", "\"*ctuca per*\"", true).getClass().getSimpleName());
}
+
/**
* {@inheritDoc}
*/
import java.util.List;
import java.util.UUID;
-import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.hibernate.Query;
import org.hibernate.Session;
UUID uuid2 = botName2.getUuid();
try {
Logger loggerTrace = Logger.getLogger("org.hibernate.type");
- loggerTrace.setLevel(Level.TRACE);
+ //loggerTrace.setLevel(Level.TRACE);
System.out.println(logger.getName());
appCtr.getNameService().save(botName1);
<?xml version='1.0' encoding='UTF-8'?>
<dataset>
+
+ <CLASSIFICATION ID="5000" UUID="2a5ceebb-4830-4524-b330-78461bf8cb6b" ROOTNODE_ID="5000" PROTECTEDTITLECACHE="false" TITLECACHE="European Abies" MICROREFERENCE="[null]" NAME_ID="5000"/>
+ <CLASSIFICATION ID="5001" UUID="d7c741e3-ae9e-4a7d-a566-9e3a7a0b51ce" ROOTNODE_ID="5001" PROTECTEDTITLECACHE="false" TITLECACHE="Abies alternative" MICROREFERENCE="[null]" NAME_ID="5001"/>
+ <CLASSIFICATION_AUD/>
<TAXONNODE ID="5000" UUID="a8266e45-091f-432f-87ae-c625e6aa9bbc" TREEINDEX="#t5000#5000#" SORTINDEX="[null]" EXCLUDED="FALSE" UNPLACED="FALSE" COUNTCHILDREN="1" CLASSIFICATION_ID="5000" PARENT_ID="[null]" TAXON_ID="[null]"/>
- <TAXONNODE ID="5001" UUID="1ff4255d-7c6c-4d01-aaae-7acc2cd3dda1" TREEINDEX="#t5001#5001#" SORTINDEX="[null]" EXCLUDED="FALSE" UNPLACED="FALSE" COUNTCHILDREN="1" CLASSIFICATION_ID="5001" PARENT_ID="[null]" TAXON_ID="[null]"/>
<TAXONNODE ID="5002" UUID="bf379dec-349a-4b95-bb02-1d6bf785983b" TREEINDEX="#t5000#5000#5002#" SORTINDEX="0" EXCLUDED="FALSE" UNPLACED="FALSE" COUNTCHILDREN="0" CLASSIFICATION_ID="5000" PARENT_ID="5000" TAXON_ID="5003"/>
- <TAXONNODE ID="5003" UUID="54f12949-9229-416c-9246-7bbc4d0f77a5" TREEINDEX="#t5001#5001#5003#" SORTINDEX="0" EXCLUDED="FALSE" UNPLACED="FALSE" COUNTCHILDREN="0" CLASSIFICATION_ID="5001" PARENT_ID="5001" TAXON_ID="5007"/>
-
+ <TAXONNODE ID="5001" UUID="1ff4255d-7c6c-4d01-aaae-7acc2cd3dda1" TREEINDEX="#t5001#5001#" SORTINDEX="[null]" EXCLUDED="FALSE" UNPLACED="FALSE" COUNTCHILDREN="1" CLASSIFICATION_ID="5001" PARENT_ID="[null]" TAXON_ID="[null]"/>
+ <TAXONNODE ID="5003" UUID="54f12949-9229-416c-9246-7bbc4d0f77a5" TREEINDEX="#t5001#5001#5003#" SORTINDEX="0" EXCLUDED="FALSE" UNPLACED="FALSE" COUNTCHILDREN="1" CLASSIFICATION_ID="5001" PARENT_ID="5001" TAXON_ID="5007"/>
+ <!-- TAXONNODE ID="5004" UUID="55f12949-9229-416c-9246-7bbc4d0f77a5" TREEINDEX="#t5001#5001#5003#5004#" SORTINDEX="0" EXCLUDED="FALSE" UNPLACED="FALSE" COUNTCHILDREN="0" CLASSIFICATION_ID="5001" PARENT_ID="5003" TAXON_ID="5003"/>
+ -->
<TAXONBASE DTYPE="Taxon" ID="5000" UUID="3e72d306-0f83-4d4f-be84-6f85a604a2be" PROTECTEDTITLECACHE="false" TITLECACHE="Abies sec. Kohlbecker, A., Testcase standart views, 2013" DOUBTFUL="false" publish="true" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" NAME_ID="5000" SEC_ID="5000"/>
<TAXONBASE DTYPE="Taxon" ID="5001" UUID="7dbd5810-a3e5-44b6-b563-25152b8867f4" PROTECTEDTITLECACHE="false" TITLECACHE="Abies alba sec. Kohlbecker, A., Testcase standart views, 2013" DOUBTFUL="false" publish="true" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" NAME_ID="5001" SEC_ID="5000"/>
<TAXONBASE DTYPE="Synonym" ID="5002" UUID="9fee273c-c819-4f1f-913a-cd910465df51" PROTECTEDTITLECACHE="false" TITLECACHE="Abies subalpina sec. Kohlbecker, A., Testcase standart views, 2013" DOUBTFUL="false" publish="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="[null]" NAME_ID="5002" SEC_ID="5000" ACCEPTEDTAXON_ID="5003" TYPE_ID="848"/>
<HOMOTYPICALGROUP ID="5004" UUID="9db11d08-706d-48da-bbf4-2fc74b106ad8" />
<HOMOTYPICALGROUP ID="5005" UUID="63d7447a-2778-4224-8535-abbbf2d7b55c" />
<HOMOTYPICALGROUP ID="5006" UUID="053bb99d-4679-483b-a7a1-7ecd2656a7db" />
- <CLASSIFICATION ID="5000" UUID="2a5ceebb-4830-4524-b330-78461bf8cb6b" ROOTNODE_ID="5000" PROTECTEDTITLECACHE="false" TITLECACHE="European Abies" MICROREFERENCE="[null]" NAME_ID="5000"/>
- <CLASSIFICATION ID="5001" UUID="d7c741e3-ae9e-4a7d-a566-9e3a7a0b51ce" ROOTNODE_ID="5001" PROTECTEDTITLECACHE="false" TITLECACHE="Abies alternative" MICROREFERENCE="[null]" NAME_ID="5001"/>
- <CLASSIFICATION_AUD/>
<LANGUAGESTRING ID="5000" UUID="0ac3b18a-9f48-49cf-8e2d-52ac46a11afa" TEXT="European Abies" LANGUAGE_ID="406"/>
<LANGUAGESTRING ID="5001" UUID="82d9ae61-3409-433c-8925-836a8739547b" TEXT="Abies alternative" LANGUAGE_ID="406"/>
<parent>
<groupId>eu.etaxonomy</groupId>
<artifactId>cdmlib-parent</artifactId>
- <version>5.2.0</version>
+ <version>5.3.0</version>
<relativePath>../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
<groupId>eu.etaxonomy</groupId>
<artifactId>cdmlib-parent</artifactId>
- <version>5.2.0</version>
+ <version>5.3.0</version>
<name>CDM Library</name>
<description>The Java implementation of the Common Data Model (CDM), the data model for EDIT's internet platform for cybertaxonomy.</description>
<url>http://cybertaxonomy.eu/cdmlib/</url>
<groupId>io.swagger</groupId>
<artifactId>swagger-annotations</artifactId>
<!-- should match the springfox-swagger2 depends on -->
- <version>1.5.6</version>
+ <version>1.5.10</version>
</dependency>
<!-- dependencies for swagger-springmvc, added explicitely -->
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger2</artifactId>
- <version>2.4.0</version>
+ <version>2.6.1</version>
</dependency>
<!-- ******* DATABASES DRIVER ******* -->