### *** SPRING ************ ###
log4j.logger.org.springframework.transaction = warn
log4j.logger.org.hibernate.engine.LoadContexts = warn
-log4j.logger.org.springframework.orm.hibernate4.SessionFactoryUtils = warn
-log4j.logger.org.springframework.orm.hibernate4 = warn
+log4j.logger.org.springframework.orm.hibernate5.SessionFactoryUtils = warn
+log4j.logger.org.springframework.orm.hibernate5 = warn
log4j.logger.org.springframework.FileSystemXmlApplicationContext = warn;
log4j.logger.org.springframework.core.io.support = warn
-/**\r
-* Copyright (C) 2007 EDIT\r
-* European Distributed Institute of Taxonomy\r
-* http://www.e-taxonomy.eu\r
-*\r
-* The contents of this file are subject to the Mozilla Public License Version 1.1\r
-* See LICENSE.TXT at the top of this package for the full license terms.\r
-*/\r
-\r
-package eu.etaxonomy.cdm.io.common;\r
-\r
-import java.lang.reflect.Constructor;\r
-import java.lang.reflect.InvocationTargetException;\r
-import java.util.HashSet;\r
-import java.util.Set;\r
-\r
-import org.apache.commons.lang.StringUtils;\r
-import org.apache.log4j.Logger;\r
-import org.hibernate.SessionFactory;\r
-import org.springframework.beans.factory.annotation.Autowired;\r
-import org.springframework.transaction.PlatformTransactionManager;\r
-import org.springframework.transaction.TransactionDefinition;\r
-import org.springframework.transaction.TransactionStatus;\r
-import org.springframework.transaction.support.DefaultTransactionDefinition;\r
-\r
-import eu.etaxonomy.cdm.api.application.CdmApplicationDefaultConfiguration;\r
-import eu.etaxonomy.cdm.common.monitor.IProgressMonitor;\r
-import eu.etaxonomy.cdm.io.common.events.IIoEvent;\r
-import eu.etaxonomy.cdm.io.common.events.IIoObserver;\r
-import eu.etaxonomy.cdm.io.common.events.IoProblemEvent;\r
-import eu.etaxonomy.cdm.io.common.events.IoProgressEvent;\r
-import eu.etaxonomy.cdm.model.common.CdmBase;\r
-\r
-/**\r
- * @author a.mueller\r
- * @created 01.07.2008\r
- */\r
-public abstract class CdmIoBase<STATE extends IoStateBase> extends CdmApplicationDefaultConfiguration\r
- implements ICdmIO<STATE>, IIoObservable {\r
- private static final Logger logger = Logger.getLogger(CdmIoBase.class);\r
-\r
- private final Set<IIoObserver> observers = new HashSet<IIoObserver>();\r
- protected String ioName = null;\r
-\r
-\r
- /**\r
- *\r
- */\r
- public CdmIoBase() {\r
- super();\r
- this.ioName = this.getClass().getSimpleName();\r
- }\r
-\r
-//******************** Observers *********************************************************\r
-\r
- @Override\r
- public boolean addObserver(IIoObserver observer){\r
- return observers.add(observer);\r
- }\r
-\r
- @Override\r
- public Set<IIoObserver> getObservers() {\r
- return observers;\r
- }\r
-\r
- @Override\r
- public void addObservers(Set<IIoObserver> newObservers) {\r
- for (IIoObserver observer : newObservers){\r
- this.observers.add(observer);\r
- }\r
- }\r
-\r
- @Override\r
- public int countObservers(){\r
- return observers.size();\r
- }\r
-\r
- @Override\r
- public boolean removeObserver(IIoObserver observer){\r
- return observers.remove(observer);\r
- }\r
-\r
- @Override\r
- public void removeObservers(){\r
- observers.removeAll(observers);\r
- }\r
-\r
- @Override\r
- public void fire(IIoEvent event){\r
- for (IIoObserver observer: observers){\r
- observer.handleEvent(event);\r
- }\r
- }\r
-\r
-//******************** End Observers *********************************************************\r
-\r
-\r
- public int countSteps(){\r
- return 1;\r
- }\r
-\r
- @Override\r
- public boolean invoke(STATE state) {\r
- if (isIgnore( state)){\r
- logger.info("No invoke for " + ioName + " (ignored)");\r
- return true;\r
- }else{\r
- updateProgress(state, "Invoking " + ioName);\r
- doInvoke(state);\r
- return state.isSuccess();\r
- }\r
- }\r
-\r
- /**\r
- * invoke method to be implemented by implementing classes\r
- * @param state\r
- * @return\r
- */\r
- protected abstract void doInvoke(STATE state);\r
-\r
- @Autowired\r
- private SessionFactory sessionFactory;\r
-\r
- /**\r
- * flush the current session\r
- */\r
- //TODO move into super class CdmApplicationDefaultConfiguration#flush() ?\r
- public void flush() {\r
- sessionFactory.getCurrentSession().flush();\r
- }\r
-\r
- @Override\r
- //TODO seems to be exact duplicate of CdmApplicationDefaultConfiguration#startTransaction(), remove duplicate\r
- public TransactionStatus startTransaction() {\r
- return startTransaction(false);\r
- }\r
-\r
- @Override\r
- //TODO seems to be exact duplicate of CdmApplicationDefaultConfiguration#startTransaction(java.lang.Boolean)\r
- public TransactionStatus startTransaction(Boolean readOnly) {\r
-\r
- DefaultTransactionDefinition defaultTxDef = new DefaultTransactionDefinition();\r
- defaultTxDef.setReadOnly(readOnly);\r
- TransactionDefinition txDef = defaultTxDef;\r
-\r
- // Log some transaction-related debug information.\r
- if (logger.isDebugEnabled()) {\r
- logger.debug("Transaction name = " + txDef.getName());\r
- logger.debug("Transaction facets:");\r
- logger.debug("Propagation behavior = " + txDef.getPropagationBehavior());\r
- logger.debug("Isolation level = " + txDef.getIsolationLevel());\r
- logger.debug("Timeout = " + txDef.getTimeout());\r
- logger.debug("Read Only = " + txDef.isReadOnly());\r
- // org.springframework.orm.hibernate4.HibernateTransactionManager\r
- // provides more transaction/session-related debug information.\r
- }\r
-\r
- TransactionStatus txStatus = super.getTransactionManager().getTransaction(txDef);\r
- return txStatus;\r
- }\r
-\r
- @Override\r
- //TODO seems to be exact duplicate of CdmApplicationDefaultConfiguration#startTransaction(java.lang.Boolean), remove duplicate?\r
- public void commitTransaction(TransactionStatus txStatus){\r
- PlatformTransactionManager txManager = super.getTransactionManager();\r
- txManager.commit(txStatus);\r
- return;\r
- }\r
-\r
- //TODO move into super class CdmApplicationDefaultConfiguration#startTransaction(java.lang.Boolean)\r
- //==> no \r
- public void rollbackTransaction(TransactionStatus txStatus){\r
- PlatformTransactionManager txManager = super.getTransactionManager();\r
- txManager.rollback(txStatus);\r
- return;\r
- }\r
-\r
- @Override\r
- public boolean check(STATE state) {\r
- if (isIgnore(state)){\r
- logger.info("No check for " + ioName + " (ignored)");\r
- return true;\r
- }else{\r
- return doCheck(state);\r
- }\r
- }\r
-\r
- protected abstract boolean doCheck(STATE state);\r
-\r
-\r
- /**\r
- * Returns true if this (IO-)class should be ignored during the import/export process.\r
- * This information is usually stored in the configuration\r
- * @param config\r
- * @return\r
- */\r
- protected abstract boolean isIgnore(STATE state);\r
-\r
- protected <T extends CdmBase> T getInstance(Class<? extends T> clazz){\r
- T result = null;\r
- try {\r
- Constructor<? extends T> constructor = clazz.getDeclaredConstructor();\r
- constructor.setAccessible(true);\r
- result = constructor.newInstance();\r
- } catch (InstantiationException e) {\r
- logger.error("Class " + clazz.getSimpleName()+" could not be instantiated. Class = " );\r
- e.printStackTrace();\r
- } catch (IllegalAccessException e) {\r
- logger.error("Constructor of class "+clazz.getSimpleName()+" could not be accessed." );\r
- e.printStackTrace();\r
- } catch (SecurityException e) {\r
- e.printStackTrace();\r
- } catch (NoSuchMethodException e) {\r
- logger.error("SecurityException for Constructor of class "+clazz.getSimpleName()+"." );\r
- e.printStackTrace();\r
- } catch (IllegalArgumentException e) {\r
- logger.error("Empty Constructor does not exist for class "+clazz.getSimpleName()+"." );\r
- e.printStackTrace();\r
- } catch (InvocationTargetException e) {\r
- logger.error("Empty Constructor could not be invoked for class "+clazz.getSimpleName()+"." );\r
- e.printStackTrace();\r
- }\r
- return result;\r
- }\r
-\r
-\r
- protected String getSuccessString(boolean success){\r
- if (success){\r
- return "with success";\r
- }else{\r
- return "with errors";\r
- }\r
- }\r
-\r
- @Override\r
- public void updateProgress(STATE state, String message) {\r
- updateProgress(state, message, 1);\r
- }\r
-\r
- @Override\r
- public void updateProgress(STATE state, String message, int worked) {\r
- IProgressMonitor progressMonitor = state.getConfig().getProgressMonitor();\r
- if(progressMonitor != null){\r
- progressMonitor.worked(worked);\r
- progressMonitor.subTask(message);\r
- }\r
- }\r
-\r
- @Override\r
- public void warnProgress(STATE state, String message, Throwable e) {\r
- if(state.getConfig().getProgressMonitor() != null){\r
- IProgressMonitor monitor = state.getConfig().getProgressMonitor();\r
- if (e == null) {\r
- monitor.warning(message);\r
- }else{\r
- monitor.warning(message, e);\r
- }\r
- }\r
- }\r
-\r
- protected void fireProgressEvent(String message, String location) {\r
- IoProgressEvent event = new IoProgressEvent();\r
- event.setThrowingClass(this.getClass());\r
- event.setMessage(message);\r
- event.setLocation(location);\r
-// int linenumber = new Exception().getStackTrace()[0].getLineNumber();\r
- fire(event);\r
- }\r
-\r
-\r
- protected void fireWarningEvent(String message, String dataLocation, Integer severity) {\r
- fireWarningEvent(message, dataLocation, severity, 1);\r
- }\r
-\r
- protected void fireWarningEvent(String message, String dataLocation, Integer severity, int stackDepth) {\r
- stackDepth++;\r
- StackTraceElement[] stackTrace = new Exception().getStackTrace();\r
- int lineNumber = stackTrace[stackDepth].getLineNumber();\r
- String methodName = stackTrace[stackDepth].getMethodName();\r
- String className = stackTrace[stackDepth].getClassName();\r
- Class<?> declaringClass;\r
- try {\r
- declaringClass = Class.forName(className);\r
- } catch (ClassNotFoundException e) {\r
- declaringClass = this.getClass();\r
- }\r
- \r
- IoProblemEvent event = IoProblemEvent.NewInstance(declaringClass, message, dataLocation,\r
- lineNumber, severity, methodName);\r
-\r
- //for performance improvement one may read:\r
- //http://stackoverflow.com/questions/421280/in-java-how-do-i-find-the-caller-of-a-method-using-stacktrace-or-reflection\r
-// Object o = new SecurityManager().getSecurityContext();\r
-\r
-\r
- fire(event);\r
- }\r
-\r
- protected boolean isBlank(String str){\r
- return StringUtils.isBlank(str);\r
- }\r
-\r
- protected boolean isNotBlank(String str){\r
- return StringUtils.isNotBlank(str);\r
- }\r
-\r
-}\r
+/**
+* Copyright (C) 2007 EDIT
+* European Distributed Institute of Taxonomy
+* http://www.e-taxonomy.eu
+*
+* The contents of this file are subject to the Mozilla Public License Version 1.1
+* See LICENSE.TXT at the top of this package for the full license terms.
+*/
+
+package eu.etaxonomy.cdm.io.common;
+
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.util.HashSet;
+import java.util.Set;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
+import org.hibernate.SessionFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.transaction.PlatformTransactionManager;
+import org.springframework.transaction.TransactionDefinition;
+import org.springframework.transaction.TransactionStatus;
+import org.springframework.transaction.support.DefaultTransactionDefinition;
+
+import eu.etaxonomy.cdm.api.application.CdmApplicationDefaultConfiguration;
+import eu.etaxonomy.cdm.common.monitor.IProgressMonitor;
+import eu.etaxonomy.cdm.io.common.events.IIoEvent;
+import eu.etaxonomy.cdm.io.common.events.IIoObserver;
+import eu.etaxonomy.cdm.io.common.events.IoProblemEvent;
+import eu.etaxonomy.cdm.io.common.events.IoProgressEvent;
+import eu.etaxonomy.cdm.model.common.CdmBase;
+
+/**
+ * @author a.mueller
+ * @created 01.07.2008
+ */
+public abstract class CdmIoBase<STATE extends IoStateBase> extends CdmApplicationDefaultConfiguration
+ implements ICdmIO<STATE>, IIoObservable {
+ private static final Logger logger = Logger.getLogger(CdmIoBase.class);
+
+ private final Set<IIoObserver> observers = new HashSet<IIoObserver>();
+ protected String ioName = null;
+
+
+    /**
+     * Creates a new IO instance; ioName defaults to the concrete class' simple name.
+     */
+ public CdmIoBase() {
+ super();
+ this.ioName = this.getClass().getSimpleName();
+ }
+
+//******************** Observers *********************************************************
+
+ @Override
+ public boolean addObserver(IIoObserver observer){
+ return observers.add(observer);
+ }
+
+ @Override
+ public Set<IIoObserver> getObservers() {
+ return observers;
+ }
+
+ @Override
+ public void addObservers(Set<IIoObserver> newObservers) {
+ for (IIoObserver observer : newObservers){
+ this.observers.add(observer);
+ }
+ }
+
+ @Override
+ public int countObservers(){
+ return observers.size();
+ }
+
+ @Override
+ public boolean removeObserver(IIoObserver observer){
+ return observers.remove(observer);
+ }
+
+ @Override
+ public void removeObservers(){
+ observers.removeAll(observers);
+ }
+
+ @Override
+ public void fire(IIoEvent event){
+ for (IIoObserver observer: observers){
+ observer.handleEvent(event);
+ }
+ }
+
+//******************** End Observers *********************************************************
+
+
+ public int countSteps(){
+ return 1;
+ }
+
+ @Override
+ public boolean invoke(STATE state) {
+ if (isIgnore( state)){
+ logger.info("No invoke for " + ioName + " (ignored)");
+ return true;
+ }else{
+ updateProgress(state, "Invoking " + ioName);
+ doInvoke(state);
+ return state.isSuccess();
+ }
+ }
+
+    /**
+     * Invoke method to be implemented by implementing classes.
+     * @param state the state object for this import/export run
+     * (no return value; implementations report success via the state)
+     */
+ protected abstract void doInvoke(STATE state);
+
+ @Autowired
+ private SessionFactory sessionFactory;
+
+ /**
+ * flush the current session
+ */
+ //TODO move into super class CdmApplicationDefaultConfiguration#flush() ?
+ public void flush() {
+ sessionFactory.getCurrentSession().flush();
+ }
+
+ @Override
+ //TODO seems to be exact duplicate of CdmApplicationDefaultConfiguration#startTransaction(), remove duplicate
+ public TransactionStatus startTransaction() {
+ return startTransaction(false);
+ }
+
+ @Override
+ //TODO seems to be exact duplicate of CdmApplicationDefaultConfiguration#startTransaction(java.lang.Boolean)
+ public TransactionStatus startTransaction(Boolean readOnly) {
+
+ DefaultTransactionDefinition defaultTxDef = new DefaultTransactionDefinition();
+ defaultTxDef.setReadOnly(readOnly);
+ TransactionDefinition txDef = defaultTxDef;
+
+ // Log some transaction-related debug information.
+ if (logger.isDebugEnabled()) {
+ logger.debug("Transaction name = " + txDef.getName());
+ logger.debug("Transaction facets:");
+ logger.debug("Propagation behavior = " + txDef.getPropagationBehavior());
+ logger.debug("Isolation level = " + txDef.getIsolationLevel());
+ logger.debug("Timeout = " + txDef.getTimeout());
+ logger.debug("Read Only = " + txDef.isReadOnly());
+ // org.springframework.orm.hibernate5.HibernateTransactionManager
+ // provides more transaction/session-related debug information.
+ }
+
+ TransactionStatus txStatus = super.getTransactionManager().getTransaction(txDef);
+ return txStatus;
+ }
+
+ @Override
+    //TODO seems to be exact duplicate of CdmApplicationDefaultConfiguration#commitTransaction(TransactionStatus), remove duplicate?
+ public void commitTransaction(TransactionStatus txStatus){
+ PlatformTransactionManager txManager = super.getTransactionManager();
+ txManager.commit(txStatus);
+ return;
+ }
+
+    //TODO move into super class CdmApplicationDefaultConfiguration#rollbackTransaction(TransactionStatus)
+ //==> no
+ public void rollbackTransaction(TransactionStatus txStatus){
+ PlatformTransactionManager txManager = super.getTransactionManager();
+ txManager.rollback(txStatus);
+ return;
+ }
+
+ @Override
+ public boolean check(STATE state) {
+ if (isIgnore(state)){
+ logger.info("No check for " + ioName + " (ignored)");
+ return true;
+ }else{
+ return doCheck(state);
+ }
+ }
+
+ protected abstract boolean doCheck(STATE state);
+
+
+    /**
+     * Returns true if this (IO-)class should be ignored during the import/export process.
+     * This information is usually stored in the configuration.
+     * @param state the state holding the configuration to consult
+     * @return true if this IO step should be skipped
+     */
+ protected abstract boolean isIgnore(STATE state);
+
+ protected <T extends CdmBase> T getInstance(Class<? extends T> clazz){
+ T result = null;
+ try {
+ Constructor<? extends T> constructor = clazz.getDeclaredConstructor();
+ constructor.setAccessible(true);
+ result = constructor.newInstance();
+ } catch (InstantiationException e) {
+ logger.error("Class " + clazz.getSimpleName()+" could not be instantiated. Class = " );
+ e.printStackTrace();
+ } catch (IllegalAccessException e) {
+ logger.error("Constructor of class "+clazz.getSimpleName()+" could not be accessed." );
+ e.printStackTrace();
+ } catch (SecurityException e) {
+ e.printStackTrace();
+ } catch (NoSuchMethodException e) {
+ logger.error("SecurityException for Constructor of class "+clazz.getSimpleName()+"." );
+ e.printStackTrace();
+ } catch (IllegalArgumentException e) {
+ logger.error("Empty Constructor does not exist for class "+clazz.getSimpleName()+"." );
+ e.printStackTrace();
+ } catch (InvocationTargetException e) {
+ logger.error("Empty Constructor could not be invoked for class "+clazz.getSimpleName()+"." );
+ e.printStackTrace();
+ }
+ return result;
+ }
+
+
+ protected String getSuccessString(boolean success){
+ if (success){
+ return "with success";
+ }else{
+ return "with errors";
+ }
+ }
+
+ @Override
+ public void updateProgress(STATE state, String message) {
+ updateProgress(state, message, 1);
+ }
+
+ @Override
+ public void updateProgress(STATE state, String message, int worked) {
+ IProgressMonitor progressMonitor = state.getConfig().getProgressMonitor();
+ if(progressMonitor != null){
+ progressMonitor.worked(worked);
+ progressMonitor.subTask(message);
+ }
+ }
+
+ @Override
+ public void warnProgress(STATE state, String message, Throwable e) {
+ if(state.getConfig().getProgressMonitor() != null){
+ IProgressMonitor monitor = state.getConfig().getProgressMonitor();
+ if (e == null) {
+ monitor.warning(message);
+ }else{
+ monitor.warning(message, e);
+ }
+ }
+ }
+
+ protected void fireProgressEvent(String message, String location) {
+ IoProgressEvent event = new IoProgressEvent();
+ event.setThrowingClass(this.getClass());
+ event.setMessage(message);
+ event.setLocation(location);
+// int linenumber = new Exception().getStackTrace()[0].getLineNumber();
+ fire(event);
+ }
+
+
+ protected void fireWarningEvent(String message, String dataLocation, Integer severity) {
+ fireWarningEvent(message, dataLocation, severity, 1);
+ }
+
+ protected void fireWarningEvent(String message, String dataLocation, Integer severity, int stackDepth) {
+ stackDepth++;
+ StackTraceElement[] stackTrace = new Exception().getStackTrace();
+ int lineNumber = stackTrace[stackDepth].getLineNumber();
+ String methodName = stackTrace[stackDepth].getMethodName();
+ String className = stackTrace[stackDepth].getClassName();
+ Class<?> declaringClass;
+ try {
+ declaringClass = Class.forName(className);
+ } catch (ClassNotFoundException e) {
+ declaringClass = this.getClass();
+ }
+
+ IoProblemEvent event = IoProblemEvent.NewInstance(declaringClass, message, dataLocation,
+ lineNumber, severity, methodName);
+
+ //for performance improvement one may read:
+ //http://stackoverflow.com/questions/421280/in-java-how-do-i-find-the-caller-of-a-method-using-stacktrace-or-reflection
+// Object o = new SecurityManager().getSecurityContext();
+
+
+ fire(event);
+ }
+
+ protected boolean isBlank(String str){
+ return StringUtils.isBlank(str);
+ }
+
+ protected boolean isNotBlank(String str){
+ return StringUtils.isNotBlank(str);
+ }
+
+}
-### ************ APPENDER ***********************************###\r
-\r
-### direct log messages to stdout ###\r
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender\r
-log4j.appender.stdout.Target=System.out\r
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout\r
-log4j.appender.stdout.layout.ConversionPattern=%d{ABSOLUTE} %5p %c{1}:%L - %m%n\r
-\r
-### direct messages to file hibernate.log ###\r
-log4j.appender.file=org.apache.log4j.FileAppender\r
-log4j.appender.file.File=hibernate.log\r
-log4j.appender.file.layout=org.apache.log4j.PatternLayout\r
-log4j.appender.file.layout.ConversionPattern=%d{ABSOLUTE} %5p %c{1}:%L - %m%n\r
-\r
-\r
-### ************* LOG LEVELS *********************************###\r
-\r
-### set log levels - for more verbose logging change 'info' to 'debug' ###\r
-\r
-log4j.rootLogger=info, stdout\r
-\r
- ### *** CDM *** ###\r
-\r
-log4j.logger.eu.etaxonomy.cdm = warn \r
-log4j.logger.eu.etaxonomy.cdm.api.application.eclipse = warn\r
-log4j.logger.eu.etaxonomy.cdm.io.berlinModel = info\r
-log4j.logger.eu.etaxonomy.cdm.io.berlinModel.BerlinModelTaxonIO = info\r
-log4j.logger.eu.etaxonomy.cdm.model.common.init = warn\r
-log4j.logger.eu.etaxonomy.cdm.io.taxonx2013 = info\r
-\r
- ### *** SPRING ************ ###\r
-log4j.logger.org.springframework.transaction = warn\r
-log4j.logger.org.hibernate.engine.LoadContexts = error\r
-log4j.logger.org.springframework.orm.hibernate4.SessionFactoryUtils = info\r
-log4j.logger.org.springframework.orm.hibernate4 = info\r
-log4j.logger.org.springframework.FileSystemXmlApplicationContext = warn;\r
-log4j.logger.org.springframework.core.io.support = info\r
-\r
-\r
- ### ***HIBERNATE ************ ###\r
-\r
-log4j.logger.org.hibernate=warn\r
-\r
-### No warnings as thrown by SQLServer\r
-log4j.logger.org.hibernate.cfg = warn\r
-\r
-### No warnings as thrown by SQLServer\r
-log4j.logger.org.hibernate.util.JDBCExceptionReporter = error\r
-\r
-### log HQL query parser activity\r
-#log4j.logger.org.hibernate.hql.ast.AST=debug\r
-\r
-### log just the SQL\r
-log4j.logger.org.hibernate.SQL=warn\r
-\r
-### log JDBC bind parameters ###\r
-log4j.logger.org.hibernate.type=warn\r
-\r
-### log schema export/update ###\r
-log4j.logger.org.hibernate.tool.hbm2ddl=warn\r
-\r
-### log HQL parse trees\r
-#log4j.logger.org.hibernate.hql=debug\r
-\r
-### log cache activity ###\r
-#log4j.logger.org.hibernate.cache=debug\r
-\r
-### log transaction activity\r
-#log4j.logger.org.hibernate.transaction=debug\r
-\r
-### log JDBC resource acquisition\r
-log4j.logger.org.hibernate.jdbc=info\r
-\r
-### enable the following line if you want to track down connection ###\r
-### leakages when using DriverManagerConnectionProvider ###\r
-#log4j.logger.org.hibernate.connection.DriverManagerConnectionProvider=trace\r
+### ************ APPENDER ***********************************###
+
+### direct log messages to stdout ###
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.Target=System.out
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{ABSOLUTE} %5p %c{1}:%L - %m%n
+
+### direct messages to file hibernate.log ###
+log4j.appender.file=org.apache.log4j.FileAppender
+log4j.appender.file.File=hibernate.log
+log4j.appender.file.layout=org.apache.log4j.PatternLayout
+log4j.appender.file.layout.ConversionPattern=%d{ABSOLUTE} %5p %c{1}:%L - %m%n
+
+
+### ************* LOG LEVELS *********************************###
+
+### set log levels - for more verbose logging change 'info' to 'debug' ###
+
+log4j.rootLogger=info, stdout
+
+ ### *** CDM *** ###
+
+log4j.logger.eu.etaxonomy.cdm = warn
+log4j.logger.eu.etaxonomy.cdm.api.application.eclipse = warn
+log4j.logger.eu.etaxonomy.cdm.io.berlinModel = info
+log4j.logger.eu.etaxonomy.cdm.io.berlinModel.BerlinModelTaxonIO = info
+log4j.logger.eu.etaxonomy.cdm.model.common.init = warn
+log4j.logger.eu.etaxonomy.cdm.io.taxonx2013 = info
+
+ ### *** SPRING ************ ###
+log4j.logger.org.springframework.transaction = warn
+log4j.logger.org.hibernate.engine.LoadContexts = error
+log4j.logger.org.springframework.orm.hibernate5.SessionFactoryUtils = info
+log4j.logger.org.springframework.orm.hibernate5 = info
+log4j.logger.org.springframework.FileSystemXmlApplicationContext = warn;
+log4j.logger.org.springframework.core.io.support = info
+
+
+ ### ***HIBERNATE ************ ###
+
+log4j.logger.org.hibernate=warn
+
+### No warnings as thrown by SQLServer
+log4j.logger.org.hibernate.cfg = warn
+
+### No warnings as thrown by SQLServer
+log4j.logger.org.hibernate.util.JDBCExceptionReporter = error
+
+### log HQL query parser activity
+#log4j.logger.org.hibernate.hql.ast.AST=debug
+
+### log just the SQL
+log4j.logger.org.hibernate.SQL=warn
+
+### log JDBC bind parameters ###
+log4j.logger.org.hibernate.type=warn
+
+### log schema export/update ###
+log4j.logger.org.hibernate.tool.hbm2ddl=warn
+
+### log HQL parse trees
+#log4j.logger.org.hibernate.hql=debug
+
+### log cache activity ###
+#log4j.logger.org.hibernate.cache=debug
+
+### log transaction activity
+#log4j.logger.org.hibernate.transaction=debug
+
+### log JDBC resource acquisition
+log4j.logger.org.hibernate.jdbc=info
+
+### enable the following line if you want to track down connection ###
+### leakages when using DriverManagerConnectionProvider ###
+#log4j.logger.org.hibernate.connection.DriverManagerConnectionProvider=trace
}\r
}\r
\r
-\r
-\r
@Override\r
public void nullSafeSet(PreparedStatement statement, Object value, int index, SessionImplementor session)\r
throws HibernateException, SQLException {\r
import javax.persistence.Entity;
import javax.persistence.FetchType;
+import javax.persistence.JoinColumn;
+import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.persistence.ManyToOne;
import javax.validation.constraints.Size;
@XmlIDREF
@XmlSchemaType(name = "IDREF")
@ManyToMany(fetch = FetchType.LAZY)
+ //preliminary #5369
+ @JoinTable(joinColumns = @JoinColumn( name="AgentBase_id"))
private Set<DefinedTerm> types; //InstitutionTypes
@XmlElement(name = "IsPartOf")
import javax.persistence.Entity;
import javax.persistence.FetchType;
+import javax.persistence.JoinColumn;
+import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
+import javax.persistence.OrderColumn;
import javax.persistence.Transient;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import org.apache.log4j.Logger;
import org.hibernate.annotations.Cascade;
import org.hibernate.annotations.CascadeType;
-import org.hibernate.annotations.IndexColumn;
+import org.hibernate.annotations.ListIndexBase;
import org.hibernate.envers.Audited;
import org.hibernate.search.annotations.Indexed;
import org.springframework.beans.factory.annotation.Configurable;
//under construction #4311
@XmlElement(name = "ProtectedCollectorTitleCache")
- private boolean protectedCollectorTitleCache = false;
-
- //An abbreviated name for the team (e. g. in case of nomenclatural authorteams).
+ private final boolean protectedCollectorTitleCache = false;
+
+    //An abbreviated name for the team (e.g. in case of nomenclatural author teams).
//A non abbreviated name for the team (e. g.
//in case of some bibliographical references)
@XmlElementWrapper(name = "TeamMembers", nillable = true)
@XmlElement(name = "TeamMember")
@XmlIDREF
@XmlSchemaType(name = "IDREF")
- @IndexColumn(name="sortIndex", base = 0)
+ @OrderColumn(name="sortIndex")
+ @ListIndexBase(value=0) //not really needed as this is the default
@ManyToMany(fetch = FetchType.LAZY)
- @Cascade({CascadeType.SAVE_UPDATE,CascadeType.MERGE})
+ @Cascade({CascadeType.SAVE_UPDATE,CascadeType.MERGE})
@Match(MatchMode.MATCH)
+ //preliminary #5369
+ @JoinTable(joinColumns = @JoinColumn( name="AgentBase_id"))
private List<Person> teamMembers;
-
+
@XmlElement(name = "hasMoreMembers")
private boolean hasMoreMembers;
/**
* Protected nomenclatural title cache flag should be set to true, if
* the title cache is to be preferred against the atomized data.
- * This may be the case if no atomized data exists or if atomization
+ * This may be the case if no atomized data exists or if atomization
* was incomplete for whatever reason.
* @return
*/
boolean protectedNomenclaturalTitleCache) {
this.protectedNomenclaturalTitleCache = protectedNomenclaturalTitleCache;
}
-
+
/**
* The hasMoreMembers flag is true if this team has more members than
//@XmlAttribute(name = "id", required = true)
@XmlTransient
@Id
-// @GeneratedValue(generator = "system-increment")
+// @GeneratedValue(generator = "system-increment") //see also AuditEvent.revisionNumber
// @GeneratedValue(generator = "enhanced-table")
@GeneratedValue(generator = "custom-enhanced-table")
@DocumentId
-/**\r
-* Copyright (C) 2007 EDIT\r
-* European Distributed Institute of Taxonomy\r
-* http://www.e-taxonomy.eu\r
-*\r
-* The contents of this file are subject to the Mozilla Public License Version 1.1\r
-* See LICENSE.TXT at the top of this package for the full license terms.\r
-*/\r
-package eu.etaxonomy.cdm.model.common;\r
-\r
-import java.lang.reflect.Constructor;\r
-import java.util.HashMap;\r
-import java.util.Map;\r
-import java.util.UUID;\r
-\r
-import org.apache.log4j.Logger;\r
-\r
-import eu.etaxonomy.cdm.model.common.init.ITermInitializer;\r
-import eu.etaxonomy.cdm.model.common.init.ITermLoader;\r
-import eu.etaxonomy.cdm.model.common.init.TermLoader;\r
-\r
-/**\r
- * @author a.mueller\r
- *\r
- */\r
-public class DefaultTermInitializer implements ITermInitializer {\r
- @SuppressWarnings("unused")\r
- private static final Logger logger = Logger.getLogger(DefaultTermInitializer.class);\r
- protected ITermLoader termLoader = new TermLoader();\r
-\r
- @Override\r
- public void initialize() {\r
- termLoader.unloadAllTerms();\r
- doInitialize();\r
- }\r
-\r
- protected void doInitialize(){\r
- Map<UUID,DefinedTermBase> terms = new HashMap<UUID,DefinedTermBase>();\r
-\r
-// for(Class<? extends DefinedTermBase<?>> clazz : classesToInitialize) {\r
- for(VocabularyEnum vocabularyEnum : VocabularyEnum.values()) {\r
-// Class<? extends DefinedTermBase<?>> clazz = vocabularyEnum.getClazz();\r
- TermVocabulary<?> voc = termLoader.loadTerms(vocabularyEnum, terms);\r
- setDefinedTerms(vocabularyEnum.getClazz(),voc);\r
- }\r
-\r
- }\r
-\r
- protected void setDefinedTerms(Class<? extends DefinedTermBase<?>> clazz, TermVocabulary<?> vocabulary) {\r
- DefinedTermBase newInstance;\r
- newInstance = getInstance(clazz);\r
- newInstance.setDefaultTerms(vocabulary);\r
- }\r
-\r
- private <T extends DefinedTermBase> T getInstance(Class<? extends DefinedTermBase> termClass) {\r
- try {\r
- Constructor<T> c = ((Class<T>)termClass).getDeclaredConstructor();\r
- c.setAccessible(true);\r
- T termInstance = c.newInstance();\r
- return termInstance;\r
- } catch (Exception e) {\r
- throw new RuntimeException(e);\r
- }\r
- }\r
-\r
-\r
-}\r
+/**
+* Copyright (C) 2007 EDIT
+* European Distributed Institute of Taxonomy
+* http://www.e-taxonomy.eu
+*
+* The contents of this file are subject to the Mozilla Public License Version 1.1
+* See LICENSE.TXT at the top of this package for the full license terms.
+*/
+package eu.etaxonomy.cdm.model.common;
+
+import java.lang.reflect.Constructor;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
+
+import org.apache.log4j.Logger;
+
+import eu.etaxonomy.cdm.model.common.init.ITermInitializer;
+import eu.etaxonomy.cdm.model.common.init.ITermLoader;
+import eu.etaxonomy.cdm.model.common.init.TermLoader;
+
+/**
+ * @author a.mueller
+ *
+ */
+public class DefaultTermInitializer implements ITermInitializer {
+ @SuppressWarnings("unused")
+ private static final Logger logger = Logger.getLogger(DefaultTermInitializer.class);
+ protected ITermLoader termLoader = new TermLoader();
+
+ private boolean omit = false;
+
+ @Override
+ public void initialize() {
+ termLoader.unloadAllTerms();
+ doInitialize();
+ }
+
+ protected void doInitialize(){
+ Map<UUID,DefinedTermBase> terms = new HashMap<UUID,DefinedTermBase>();
+
+// for(Class<? extends DefinedTermBase<?>> clazz : classesToInitialize) {
+ for(VocabularyEnum vocabularyEnum : VocabularyEnum.values()) {
+// Class<? extends DefinedTermBase<?>> clazz = vocabularyEnum.getClazz();
+ TermVocabulary<?> voc = termLoader.loadTerms(vocabularyEnum, terms);
+ setDefinedTerms(vocabularyEnum.getClazz(),voc);
+ }
+
+ }
+
+ protected void setDefinedTerms(Class<? extends DefinedTermBase<?>> clazz, TermVocabulary<?> vocabulary) {
+ DefinedTermBase newInstance;
+ newInstance = getInstance(clazz);
+ newInstance.setDefaultTerms(vocabulary);
+ }
+
+ private <T extends DefinedTermBase> T getInstance(Class<? extends DefinedTermBase> termClass) {
+ try {
+ Constructor<T> c = ((Class<T>)termClass).getDeclaredConstructor();
+ c.setAccessible(true);
+ T termInstance = c.newInstance();
+ return termInstance;
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+
+ public void setOmit(boolean omit) {
+ this.omit = omit;
+ }
+
+ public boolean isOmit() {
+ return omit;
+ }
+
+
+}
-/**\r
-* Copyright (C) 2007 EDIT\r
-* European Distributed Institute of Taxonomy\r
-* http://www.e-taxonomy.eu\r
-*\r
-* The contents of this file are subject to the Mozilla Public License Version 1.1\r
-* See LICENSE.TXT at the top of this package for the full license terms.\r
-*/\r
-package eu.etaxonomy.cdm.model.common;\r
-\r
-import java.util.HashSet;\r
-import java.util.Set;\r
-import java.util.UUID;\r
-\r
-import javax.persistence.Column;\r
-import javax.persistence.Entity;\r
-import javax.persistence.FetchType;\r
-import javax.persistence.ManyToMany;\r
-import javax.persistence.Table;\r
-import javax.validation.constraints.NotNull;\r
-import javax.xml.bind.annotation.XmlAccessType;\r
-import javax.xml.bind.annotation.XmlAccessorType;\r
-import javax.xml.bind.annotation.XmlElement;\r
-import javax.xml.bind.annotation.XmlElementWrapper;\r
-import javax.xml.bind.annotation.XmlIDREF;\r
-import javax.xml.bind.annotation.XmlRootElement;\r
-import javax.xml.bind.annotation.XmlSchemaType;\r
-import javax.xml.bind.annotation.XmlType;\r
-\r
-import org.apache.log4j.Logger;\r
-import org.hibernate.annotations.Cascade;\r
-import org.hibernate.annotations.CascadeType;\r
-import org.hibernate.search.annotations.Field;\r
-import org.hibernate.search.annotations.Indexed;\r
-import org.springframework.security.core.GrantedAuthority;\r
-\r
-@XmlAccessorType(XmlAccessType.FIELD)\r
-@XmlType(name = "Group", propOrder = {\r
- "name",\r
- "members",\r
- "grantedAuthorities"\r
-})\r
-@XmlRootElement(name = "Group")\r
-@Entity\r
-@Indexed(index = "eu.etaxonomy.cdm.model.common.Group")\r
-@Table(name = "PermissionGroup")\r
-public class Group extends CdmBase {\r
- private static final long serialVersionUID = 7216686200093054648L;\r
- private static final Logger logger = Logger.getLogger(Group.class);\r
-\r
- public final static UUID groupEditorUuid = UUID.fromString("22e5e8af-b99c-4884-a92f-71978efd3770");\r
- public final static UUID groupProjectManagerUuid = UUID.fromString("645191ae-32a4-4d4e-9b86-c90e0d41944a");\r
- public final static UUID groupPublisherUuid = UUID.fromString("c1f20ad8-1782-40a7-b06b-ce4773acb5ea");\r
-\r
-//*********************** FACTORY *********************/\r
-\r
- public static Group NewInstance(){\r
- return new Group();\r
- }\r
-\r
- public static Group NewInstance(String name){\r
- Group group = Group.NewInstance();\r
- group.setName(name);\r
- return group;\r
- }\r
-\r
-//**************** FIELDS ******************************/\r
-\r
- @XmlElement(name = "Name")\r
- @Column(unique = true)\r
- @Field\r
- @NotNull\r
- protected String name;\r
-\r
- @XmlElementWrapper(name = "Members")\r
- @XmlElement(name = "Member")\r
- @XmlIDREF\r
- @XmlSchemaType(name = "IDREF")\r
- @ManyToMany(fetch = FetchType.LAZY, mappedBy = "groups")\r
- @Cascade({CascadeType.REFRESH, CascadeType.MERGE}) // see #2414 (Group updating doesn't work)\r
- protected Set<User> members = new HashSet<User>();\r
-\r
- @XmlElementWrapper(name = "GrantedAuthorities")\r
- @XmlElement(name = "GrantedAuthority", type = GrantedAuthorityImpl.class)\r
- @XmlIDREF\r
- @XmlSchemaType(name = "IDREF")\r
- @ManyToMany(fetch = FetchType.LAZY, targetEntity = GrantedAuthorityImpl.class)\r
- @Cascade({CascadeType.SAVE_UPDATE,CascadeType.MERGE})\r
- protected Set <GrantedAuthority> grantedAuthorities = new HashSet<GrantedAuthority>();\r
-\r
-// ********************* CONSTRUCTOR ************************/\r
-\r
- protected Group(){\r
- super();\r
- }\r
-\r
-// *************** METHODS ***********************************/\r
-\r
- public Set<GrantedAuthority> getGrantedAuthorities() {\r
- return grantedAuthorities;\r
- }\r
-\r
- public boolean addGrantedAuthority(GrantedAuthority grantedAuthority){\r
- return grantedAuthorities.add(grantedAuthority);\r
- }\r
-\r
- public boolean removeGrantedAuthority(GrantedAuthority grantedAuthority){\r
- return grantedAuthorities.remove(grantedAuthority);\r
- }\r
-\r
- public void setName(String name) {\r
- this.name = name;\r
- }\r
-\r
- public String getName() {\r
- return name;\r
- }\r
-\r
- public Set<User> getMembers() {\r
- return members;\r
- }\r
-\r
- public boolean addMember(User user) {\r
- user.getGroups().add(this);\r
- return this.members.add(user);\r
- }\r
-\r
- public boolean removeMember(User user) {\r
- if(members.contains(user)) {\r
- user.getGroups().remove(this);\r
- return this.members.remove(user);\r
- } else {\r
- return false;\r
- }\r
- }\r
-//*********************** CLONE ********************************************************/\r
-\r
- /**\r
- * Clones <i>this</i> Group. This is a shortcut that enables to create\r
- * a new instance that differs only slightly from <i>this</i> group by\r
- * modifying only some of the attributes.\r
- *\r
- * @see eu.etaxonomy.cdm.model.common.TermBase#clone()\r
- * @see java.lang.Object#clone()\r
- */\r
- @Override\r
- public Object clone() {\r
- Group result;\r
- try{\r
- result = (Group)super.clone();\r
- result.grantedAuthorities = new HashSet<GrantedAuthority>();\r
- for (GrantedAuthority grantedauthority: this.grantedAuthorities){\r
- result.addGrantedAuthority(grantedauthority);\r
- }\r
-\r
- result.members = new HashSet<User>();\r
- for (User member: this.members){\r
- result.addMember(member);\r
- }\r
-\r
- //no changes to name\r
- return result;\r
- } catch (CloneNotSupportedException e) {\r
- logger.warn("Object does not implement cloneable");\r
- e.printStackTrace();\r
- return null;\r
-\r
- }\r
-\r
-\r
- }\r
-\r
-}\r
+/**
+* Copyright (C) 2007 EDIT
+* European Distributed Institute of Taxonomy
+* http://www.e-taxonomy.eu
+*
+* The contents of this file are subject to the Mozilla Public License Version 1.1
+* See LICENSE.TXT at the top of this package for the full license terms.
+*/
+package eu.etaxonomy.cdm.model.common;
+
+import java.util.HashSet;
+import java.util.Set;
+import java.util.UUID;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.JoinColumn;
+import javax.persistence.JoinTable;
+import javax.persistence.ManyToMany;
+import javax.persistence.Table;
+import javax.validation.constraints.NotNull;
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlElementWrapper;
+import javax.xml.bind.annotation.XmlIDREF;
+import javax.xml.bind.annotation.XmlRootElement;
+import javax.xml.bind.annotation.XmlSchemaType;
+import javax.xml.bind.annotation.XmlType;
+
+import org.apache.log4j.Logger;
+import org.hibernate.annotations.Cascade;
+import org.hibernate.annotations.CascadeType;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Indexed;
+import org.springframework.security.core.GrantedAuthority;
+
+@XmlAccessorType(XmlAccessType.FIELD)
+@XmlType(name = "Group", propOrder = {
+ "name",
+ "members",
+ "grantedAuthorities"
+})
+@XmlRootElement(name = "Group")
+@Entity
+@Indexed(index = "eu.etaxonomy.cdm.model.common.Group")
+@Table(name = "PermissionGroup")
+public class Group extends CdmBase {
+ private static final long serialVersionUID = 7216686200093054648L;
+ private static final Logger logger = Logger.getLogger(Group.class);
+
+ public final static UUID groupEditorUuid = UUID.fromString("22e5e8af-b99c-4884-a92f-71978efd3770");
+ public final static UUID groupProjectManagerUuid = UUID.fromString("645191ae-32a4-4d4e-9b86-c90e0d41944a");
+ public final static UUID groupPublisherUuid = UUID.fromString("c1f20ad8-1782-40a7-b06b-ce4773acb5ea");
+
+//*********************** FACTORY *********************/
+
+ public static Group NewInstance(){
+ return new Group();
+ }
+
+ public static Group NewInstance(String name){
+ Group group = Group.NewInstance();
+ group.setName(name);
+ return group;
+ }
+
+//**************** FIELDS ******************************/
+
+ @XmlElement(name = "Name")
+ @Column(unique = true)
+ @Field
+ @NotNull
+ protected String name;
+
+ @XmlElementWrapper(name = "Members")
+ @XmlElement(name = "Member")
+ @XmlIDREF
+ @XmlSchemaType(name = "IDREF")
+ @ManyToMany(fetch = FetchType.LAZY, mappedBy = "groups")
+ @Cascade({CascadeType.REFRESH, CascadeType.MERGE}) // see #2414 (Group updating doesn't work)
+ protected Set<User> members = new HashSet<User>();
+
+ @XmlElementWrapper(name = "GrantedAuthorities")
+ @XmlElement(name = "GrantedAuthority", type = GrantedAuthorityImpl.class)
+ @XmlIDREF
+ @XmlSchemaType(name = "IDREF")
+ @ManyToMany(fetch = FetchType.LAZY, targetEntity = GrantedAuthorityImpl.class)
+ //preliminary #5369
+ @JoinTable(joinColumns = @JoinColumn( name="PermissionGroup_id"))
+ @Cascade({CascadeType.SAVE_UPDATE,CascadeType.MERGE})
+ protected Set <GrantedAuthority> grantedAuthorities = new HashSet<GrantedAuthority>();
+
+// ********************* CONSTRUCTOR ************************/
+
+ protected Group(){
+ super();
+ }
+
+// *************** METHODS ***********************************/
+
+ public Set<GrantedAuthority> getGrantedAuthorities() {
+ return grantedAuthorities;
+ }
+
+ public boolean addGrantedAuthority(GrantedAuthority grantedAuthority){
+ return grantedAuthorities.add(grantedAuthority);
+ }
+
+ public boolean removeGrantedAuthority(GrantedAuthority grantedAuthority){
+ return grantedAuthorities.remove(grantedAuthority);
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public Set<User> getMembers() {
+ return members;
+ }
+
+ public boolean addMember(User user) {
+ user.getGroups().add(this);
+ return this.members.add(user);
+ }
+
+ public boolean removeMember(User user) {
+ if(members.contains(user)) {
+ user.getGroups().remove(this);
+ return this.members.remove(user);
+ } else {
+ return false;
+ }
+ }
+//*********************** CLONE ********************************************************/
+
+ /**
+ * Clones <i>this</i> Group. This is a shortcut that enables to create
+ * a new instance that differs only slightly from <i>this</i> group by
+ * modifying only some of the attributes.
+ *
+ * @see eu.etaxonomy.cdm.model.common.TermBase#clone()
+ * @see java.lang.Object#clone()
+ */
+ @Override
+ public Object clone() {
+ Group result;
+ try{
+ result = (Group)super.clone();
+ result.grantedAuthorities = new HashSet<GrantedAuthority>();
+ for (GrantedAuthority grantedauthority: this.grantedAuthorities){
+ result.addGrantedAuthority(grantedauthority);
+ }
+
+ result.members = new HashSet<User>();
+ for (User member: this.members){
+ result.addMember(member);
+ }
+
+ //no changes to name
+ return result;
+ } catch (CloneNotSupportedException e) {
+ logger.warn("Object does not implement cloneable");
+ e.printStackTrace();
+ return null;
+ }
+ }
+}
-/**\r
-* Copyright (C) 2007 EDIT\r
-* European Distributed Institute of Taxonomy\r
-* http://www.e-taxonomy.eu\r
-*\r
-* The contents of this file are subject to the Mozilla Public License Version 1.1\r
-* See LICENSE.TXT at the top of this package for the full license terms.\r
-*/\r
-\r
-package eu.etaxonomy.cdm.model.common;\r
-\r
-import java.util.Collection;\r
-import java.util.HashSet;\r
-import java.util.Set;\r
-\r
-import javax.persistence.Column;\r
-import javax.persistence.Entity;\r
-import javax.persistence.FetchType;\r
-import javax.persistence.ManyToMany;\r
-import javax.persistence.OneToOne;\r
-import javax.persistence.Table;\r
-import javax.persistence.Transient;\r
-import javax.validation.constraints.NotNull;\r
-import javax.xml.bind.annotation.XmlAccessType;\r
-import javax.xml.bind.annotation.XmlAccessorType;\r
-import javax.xml.bind.annotation.XmlElement;\r
-import javax.xml.bind.annotation.XmlElementWrapper;\r
-import javax.xml.bind.annotation.XmlIDREF;\r
-import javax.xml.bind.annotation.XmlRootElement;\r
-import javax.xml.bind.annotation.XmlSchemaType;\r
-import javax.xml.bind.annotation.XmlTransient;\r
-import javax.xml.bind.annotation.XmlType;\r
-\r
-import org.apache.log4j.Logger;\r
-import org.hibernate.annotations.Cascade;\r
-import org.hibernate.annotations.CascadeType;\r
-import org.hibernate.envers.Audited;\r
-import org.hibernate.envers.NotAudited;\r
-import org.hibernate.search.annotations.Analyze;\r
-import org.hibernate.search.annotations.Field;\r
-import org.hibernate.search.annotations.Indexed;\r
-import org.hibernate.search.annotations.IndexedEmbedded;\r
-import org.springframework.security.core.Authentication;\r
-import org.springframework.security.core.GrantedAuthority;\r
-import org.springframework.security.core.context.SecurityContextHolder;\r
-import org.springframework.security.core.userdetails.UserDetails;\r
-\r
-import eu.etaxonomy.cdm.model.agent.Person;\r
-\r
-@XmlAccessorType(XmlAccessType.FIELD)\r
-@XmlType(name = "User", propOrder = {\r
- "username",\r
- "password",\r
- "emailAddress",\r
- "grantedAuthorities",\r
- "groups",\r
- "enabled",\r
- "accountNonExpired",\r
- "credentialsNonExpired",\r
- "accountNonLocked",\r
- "person"\r
-})\r
-@XmlRootElement(name = "User")\r
-@Entity\r
-@Indexed(index = "eu.etaxonomy.cdm.model.common.User")\r
-@Audited\r
-@Table(name = "UserAccount")\r
-public class User extends CdmBase implements UserDetails {\r
- private static final long serialVersionUID = 6582191171369439163L;\r
- private static final Logger logger = Logger.getLogger(User.class);\r
-\r
- // **************************** FACTORY *****************************************/\r
-\r
- public static User NewInstance(String username, String pwd){\r
- User user = new User();\r
- user.setUsername(username);\r
- user.setPassword(pwd);\r
-\r
- user.setAccountNonExpired(true);\r
- user.setAccountNonLocked(true);\r
- user.setCredentialsNonExpired(true);\r
- user.setEnabled(true);\r
-\r
- return user;\r
- }\r
-\r
- public static User NewInstance(String personTitle, String username, String pwd){\r
- User user = new User();\r
- user.setUsername(username);\r
- user.setPassword(pwd);\r
-\r
- user.setAccountNonExpired(true);\r
- user.setAccountNonLocked(true);\r
- user.setCredentialsNonExpired(true);\r
- user.setEnabled(true);\r
- Person userPerson = Person.NewTitledInstance(personTitle);\r
- user.setPerson(userPerson);\r
-\r
- return user;\r
- }\r
-\r
-//***************************** Fields *********************** /\r
-\r
- @XmlElement(name = "Username")\r
- @Column(unique = true)\r
- @Field(analyze = Analyze.NO)\r
- @NotNull\r
- protected String username;\r
-\r
- /**\r
- * a salted, MD5 encoded hash of the plaintext password\r
- */\r
- @XmlElement(name = "Password")\r
- @NotAudited\r
- protected String password;\r
-\r
- @XmlElement(name = "EmailAddress")\r
- protected String emailAddress;\r
-\r
- @XmlElementWrapper(name = "GrantedAuthorities")\r
- @XmlElement(name = "GrantedAuthority", type = GrantedAuthorityImpl.class)\r
- @XmlIDREF\r
- @XmlSchemaType(name = "IDREF")\r
- @ManyToMany(fetch = FetchType.LAZY, targetEntity = GrantedAuthorityImpl.class)\r
- @Cascade({CascadeType.SAVE_UPDATE,CascadeType.MERGE, CascadeType.REFRESH}) // see #2414 (Group updating doesn't work)\r
- @NotAudited\r
- protected Set<GrantedAuthority> grantedAuthorities = new HashSet<GrantedAuthority>(); //authorities of this user only\r
-\r
- @XmlElementWrapper(name = "Groups")\r
- @XmlElement(name = "Group")\r
- @XmlIDREF\r
- @XmlSchemaType(name = "IDREF")\r
- @ManyToMany(fetch = FetchType.LAZY)\r
- @Cascade({CascadeType.REFRESH, CascadeType.SAVE_UPDATE,CascadeType.MERGE}) // see #2414 (Group updating doesn't work)\r
- @IndexedEmbedded(depth = 1)\r
- @NotAudited\r
- protected Set<Group> groups = new HashSet<Group>();\r
-\r
- @XmlElement(name = "Enabled")\r
- protected boolean enabled;\r
-\r
- @XmlElement(name = "AccountNonExpired")\r
- protected boolean accountNonExpired;\r
-\r
- @XmlElement(name = "CredentialsNonExpired")\r
- protected boolean credentialsNonExpired;\r
-\r
- @XmlElement(name = "AccountNonLocked")\r
- protected boolean accountNonLocked;\r
-\r
- @XmlElement(name = "Person")\r
- @XmlIDREF\r
- @XmlSchemaType(name = "IDREF")\r
- @OneToOne(fetch = FetchType.LAZY)\r
- @Cascade({CascadeType.SAVE_UPDATE,CascadeType.MERGE})\r
- @IndexedEmbedded(depth = 1)\r
- protected Person person;\r
-\r
- @XmlTransient\r
- @Transient\r
- private Set<GrantedAuthority> authorities; //authorities of this user and of all groups the user belongs to\r
-\r
-//***************************** Constructor *********************** /\r
-\r
- protected User(){\r
- super();\r
- }\r
-\r
-// ***************************** METHODS ******************************/\r
-\r
- /**\r
- * Initializes or refreshes the collection of authorities, See\r
- * {@link #getAuthorities()}\r
- */\r
- //FIXME made public as preliminary solution to #4053 (Transient field User.authorities not refreshed on reloading entity)\r
- public void initAuthorities() {\r
- authorities = new HashSet<GrantedAuthority>();\r
- authorities.addAll(grantedAuthorities);\r
- for(Group group : groups) {\r
- authorities.addAll(group.getGrantedAuthorities());\r
- }\r
- }\r
-\r
- /**\r
- * Implementation of {@link UserDetails#getAuthorities()}\r
- *\r
- * {@inheritDoc}\r
- *\r
- * @return returns all {@code Set<GrantedAuthority>} instances contained in\r
- * the sets {@link #getGrantedAuthorities()} and\r
- * {@link #getGroups()}\r
- */\r
- @Override\r
- @Transient\r
- public Collection<GrantedAuthority> getAuthorities() {\r
- if(authorities == null || authorities.size() == 0) {\r
- initAuthorities();\r
- }\r
- return authorities;\r
- }\r
-\r
- @Override\r
- public String getPassword() {\r
- return password;\r
- }\r
-\r
- @Override\r
- public String getUsername() {\r
- return username;\r
- }\r
-\r
- @Override\r
- public boolean isAccountNonExpired() {\r
- return accountNonExpired;\r
- }\r
-\r
- @Override\r
- public boolean isAccountNonLocked() {\r
- return accountNonLocked;\r
- }\r
-\r
- @Override\r
- public boolean isCredentialsNonExpired() {\r
- return credentialsNonExpired;\r
- }\r
-\r
- @Override\r
- public boolean isEnabled() {\r
- return enabled;\r
- }\r
-\r
- public String getEmailAddress() {\r
- return emailAddress;\r
- }\r
-\r
- public void setEmailAddress(String emailAddress) {\r
- this.emailAddress = emailAddress;\r
- }\r
-\r
- public Set<GrantedAuthority> getGrantedAuthorities() {\r
- return grantedAuthorities;\r
- }\r
-\r
- public void setGrantedAuthorities(Set<GrantedAuthority> grantedAuthorities) {\r
- this.grantedAuthorities = grantedAuthorities;\r
- initAuthorities();\r
- }\r
-\r
- public void setUsername(String username) {\r
- this.username = username;\r
- }\r
-\r
- public void setPassword(String password) {\r
- this.password = password;\r
- }\r
-\r
- public void setEnabled(boolean enabled) {\r
- this.enabled = enabled;\r
- }\r
-\r
- public void setAccountNonExpired(boolean accountNonExpired) {\r
- this.accountNonExpired = accountNonExpired;\r
- }\r
-\r
- public void setCredentialsNonExpired(boolean credentialsNonExpired) {\r
- this.credentialsNonExpired = credentialsNonExpired;\r
- }\r
-\r
- public void setAccountNonLocked(boolean accountNonLocked) {\r
- this.accountNonLocked = accountNonLocked;\r
- }\r
-\r
- protected void setGroups(Set<Group> groups) {\r
- this.groups = groups;\r
- initAuthorities();\r
- }\r
-\r
- public Set<Group> getGroups() {\r
- return groups;\r
- }\r
-\r
-\r
- public Person getPerson() {\r
- return person;\r
- }\r
-\r
- public void setPerson(Person person) {\r
- this.person = person;\r
- }\r
-\r
- public static User getCurrentAuthenticatedUser() {\r
- Authentication authentication = SecurityContextHolder.getContext().getAuthentication();\r
- if(authentication != null && authentication.getPrincipal() != null && authentication.getPrincipal() instanceof User) {\r
- return (User)authentication.getPrincipal();\r
- }\r
- return null;\r
- }\r
-\r
-//*********************** CLONE ********************************************************/\r
-\r
- /**\r
- * Clones <i>this</i> User. This is a shortcut that enables to create\r
- * a new instance that differs only slightly from <i>this</i> User.\r
- * The corresponding person is cloned.\r
- *\r
- * @see eu.etaxonomy.cdm.model.common.CdmBase#clone()\r
- * @see java.lang.Object#clone()\r
- */\r
- @Override\r
- public Object clone() {\r
- try{\r
- User result = (User)super.clone();\r
- result.setPerson((Person)this.person.clone());\r
- return result;\r
- } catch (CloneNotSupportedException e){\r
- logger.warn("Object does not implement cloneable");\r
- e.printStackTrace();\r
- return null;\r
- }\r
-\r
-\r
- }\r
-}\r
+/**
+* Copyright (C) 2007 EDIT
+* European Distributed Institute of Taxonomy
+* http://www.e-taxonomy.eu
+*
+* The contents of this file are subject to the Mozilla Public License Version 1.1
+* See LICENSE.TXT at the top of this package for the full license terms.
+*/
+
+package eu.etaxonomy.cdm.model.common;
+
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Set;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.JoinColumn;
+import javax.persistence.JoinTable;
+import javax.persistence.ManyToMany;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+import javax.persistence.Transient;
+import javax.validation.constraints.NotNull;
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlElementWrapper;
+import javax.xml.bind.annotation.XmlIDREF;
+import javax.xml.bind.annotation.XmlRootElement;
+import javax.xml.bind.annotation.XmlSchemaType;
+import javax.xml.bind.annotation.XmlTransient;
+import javax.xml.bind.annotation.XmlType;
+
+import org.apache.log4j.Logger;
+import org.hibernate.annotations.Cascade;
+import org.hibernate.annotations.CascadeType;
+import org.hibernate.envers.Audited;
+import org.hibernate.envers.NotAudited;
+import org.hibernate.search.annotations.Analyze;
+import org.hibernate.search.annotations.Field;
+import org.hibernate.search.annotations.Indexed;
+import org.hibernate.search.annotations.IndexedEmbedded;
+import org.springframework.security.core.Authentication;
+import org.springframework.security.core.GrantedAuthority;
+import org.springframework.security.core.context.SecurityContextHolder;
+import org.springframework.security.core.userdetails.UserDetails;
+
+import eu.etaxonomy.cdm.model.agent.Person;
+
+@XmlAccessorType(XmlAccessType.FIELD)
+@XmlType(name = "User", propOrder = {
+ "username",
+ "password",
+ "emailAddress",
+ "grantedAuthorities",
+ "groups",
+ "enabled",
+ "accountNonExpired",
+ "credentialsNonExpired",
+ "accountNonLocked",
+ "person"
+})
+@XmlRootElement(name = "User")
+@Entity
+@Indexed(index = "eu.etaxonomy.cdm.model.common.User")
+@Audited
+@Table(name = "UserAccount")
+public class User extends CdmBase implements UserDetails {
+ private static final long serialVersionUID = 6582191171369439163L;
+ private static final Logger logger = Logger.getLogger(User.class);
+
+ // **************************** FACTORY *****************************************/
+
+ public static User NewInstance(String username, String pwd){
+ User user = new User();
+ user.setUsername(username);
+ user.setPassword(pwd);
+
+ user.setAccountNonExpired(true);
+ user.setAccountNonLocked(true);
+ user.setCredentialsNonExpired(true);
+ user.setEnabled(true);
+
+ return user;
+ }
+
+ public static User NewInstance(String personTitle, String username, String pwd){
+ User user = new User();
+ user.setUsername(username);
+ user.setPassword(pwd);
+
+ user.setAccountNonExpired(true);
+ user.setAccountNonLocked(true);
+ user.setCredentialsNonExpired(true);
+ user.setEnabled(true);
+ Person userPerson = Person.NewTitledInstance(personTitle);
+ user.setPerson(userPerson);
+
+ return user;
+ }
+
+//***************************** Fields *********************** /
+
+ @XmlElement(name = "Username")
+ @Column(unique = true)
+ @Field(analyze = Analyze.NO)
+ @NotNull
+ protected String username;
+
+ /**
+ * a salted, MD5 encoded hash of the plaintext password
+ */
+ @XmlElement(name = "Password")
+ @NotAudited
+ protected String password;
+
+ @XmlElement(name = "EmailAddress")
+ protected String emailAddress;
+
+ @XmlElementWrapper(name = "GrantedAuthorities")
+ @XmlElement(name = "GrantedAuthority", type = GrantedAuthorityImpl.class)
+ @XmlIDREF
+ @XmlSchemaType(name = "IDREF")
+ @ManyToMany(fetch = FetchType.LAZY, targetEntity = GrantedAuthorityImpl.class)
+ //preliminary #5369
+ @JoinTable(joinColumns = @JoinColumn( name="UserAccount_id"))
+ @Cascade({CascadeType.SAVE_UPDATE,CascadeType.MERGE, CascadeType.REFRESH}) // see #2414 (Group updating doesn't work)
+ @NotAudited
+ protected Set<GrantedAuthority> grantedAuthorities = new HashSet<GrantedAuthority>(); //authorities of this user only
+
+ @XmlElementWrapper(name = "Groups")
+ @XmlElement(name = "Group")
+ @XmlIDREF
+ @XmlSchemaType(name = "IDREF")
+ @ManyToMany(fetch = FetchType.LAZY)
+ @Cascade({CascadeType.REFRESH, CascadeType.SAVE_UPDATE,CascadeType.MERGE}) // see #2414 (Group updating doesn't work)
+ @IndexedEmbedded(depth = 1)
+ @NotAudited
+ protected Set<Group> groups = new HashSet<Group>();
+
+ @XmlElement(name = "Enabled")
+ protected boolean enabled;
+
+ @XmlElement(name = "AccountNonExpired")
+ protected boolean accountNonExpired;
+
+ @XmlElement(name = "CredentialsNonExpired")
+ protected boolean credentialsNonExpired;
+
+ @XmlElement(name = "AccountNonLocked")
+ protected boolean accountNonLocked;
+
+ @XmlElement(name = "Person")
+ @XmlIDREF
+ @XmlSchemaType(name = "IDREF")
+ @OneToOne(fetch = FetchType.LAZY)
+ @Cascade({CascadeType.SAVE_UPDATE,CascadeType.MERGE})
+ @IndexedEmbedded(depth = 1)
+ protected Person person;
+
+ @XmlTransient
+ @Transient
+ private Set<GrantedAuthority> authorities; //authorities of this user and of all groups the user belongs to
+
+//***************************** Constructor *********************** /
+
+ protected User(){
+ super();
+ }
+
+// ***************************** METHODS ******************************/
+
+ /**
+ * Initializes or refreshes the collection of authorities, See
+ * {@link #getAuthorities()}
+ */
+ //FIXME made public as preliminary solution to #4053 (Transient field User.authorities not refreshed on reloading entity)
+ public void initAuthorities() {
+ authorities = new HashSet<GrantedAuthority>();
+ authorities.addAll(grantedAuthorities);
+ for(Group group : groups) {
+ authorities.addAll(group.getGrantedAuthorities());
+ }
+ }
+
+ /**
+ * Implementation of {@link UserDetails#getAuthorities()}
+ *
+ * {@inheritDoc}
+ *
+ * @return returns all {@code Set<GrantedAuthority>} instances contained in
+ * the sets {@link #getGrantedAuthorities()} and
+ * {@link #getGroups()}
+ */
+ @Override
+ @Transient
+ public Collection<GrantedAuthority> getAuthorities() {
+ if(authorities == null || authorities.size() == 0) {
+ initAuthorities();
+ }
+ return authorities;
+ }
+
+ @Override
+ public String getPassword() {
+ return password;
+ }
+
+ @Override
+ public String getUsername() {
+ return username;
+ }
+
+ @Override
+ public boolean isAccountNonExpired() {
+ return accountNonExpired;
+ }
+
+ @Override
+ public boolean isAccountNonLocked() {
+ return accountNonLocked;
+ }
+
+ @Override
+ public boolean isCredentialsNonExpired() {
+ return credentialsNonExpired;
+ }
+
+ @Override
+ public boolean isEnabled() {
+ return enabled;
+ }
+
+ public String getEmailAddress() {
+ return emailAddress;
+ }
+
+ public void setEmailAddress(String emailAddress) {
+ this.emailAddress = emailAddress;
+ }
+
+ public Set<GrantedAuthority> getGrantedAuthorities() {
+ return grantedAuthorities;
+ }
+
+ public void setGrantedAuthorities(Set<GrantedAuthority> grantedAuthorities) {
+ this.grantedAuthorities = grantedAuthorities;
+ initAuthorities();
+ }
+
+ public void setUsername(String username) {
+ this.username = username;
+ }
+
+ public void setPassword(String password) {
+ this.password = password;
+ }
+
+ public void setEnabled(boolean enabled) {
+ this.enabled = enabled;
+ }
+
+ public void setAccountNonExpired(boolean accountNonExpired) {
+ this.accountNonExpired = accountNonExpired;
+ }
+
+ public void setCredentialsNonExpired(boolean credentialsNonExpired) {
+ this.credentialsNonExpired = credentialsNonExpired;
+ }
+
+ public void setAccountNonLocked(boolean accountNonLocked) {
+ this.accountNonLocked = accountNonLocked;
+ }
+
+ protected void setGroups(Set<Group> groups) {
+ this.groups = groups;
+ initAuthorities();
+ }
+
+ public Set<Group> getGroups() {
+ return groups;
+ }
+
+
+ public Person getPerson() {
+ return person;
+ }
+
+ public void setPerson(Person person) {
+ this.person = person;
+ }
+
+ public static User getCurrentAuthenticatedUser() {
+ Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
+ if(authentication != null && authentication.getPrincipal() != null && authentication.getPrincipal() instanceof User) {
+ return (User)authentication.getPrincipal();
+ }
+ return null;
+ }
+
+//*********************** CLONE ********************************************************/
+
+ /**
+ * Clones <i>this</i> User. This is a shortcut that enables to create
+ * a new instance that differs only slightly from <i>this</i> User.
+ * The corresponding person is cloned.
+ *
+ * @see eu.etaxonomy.cdm.model.common.CdmBase#clone()
+ * @see java.lang.Object#clone()
+ */
+ @Override
+ public Object clone() {
+ try{
+ User result = (User)super.clone();
+ result.setPerson((Person)this.person.clone());
+ return result;
+ } catch (CloneNotSupportedException e){
+ logger.warn("Object does not implement cloneable");
+ e.printStackTrace();
+ return null;
+ }
+
+
+ }
+}
import javax.persistence.ManyToOne;
import javax.persistence.MapKeyJoinColumn;
import javax.persistence.OneToMany;
+import javax.persistence.OrderColumn;
import javax.persistence.Transient;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import org.apache.log4j.Logger;
import org.hibernate.annotations.Cascade;
import org.hibernate.annotations.CascadeType;
-import org.hibernate.annotations.IndexColumn;
+import org.hibernate.annotations.ListIndexBase;
import org.hibernate.envers.Audited;
import org.hibernate.search.annotations.IndexedEmbedded;
@XmlIDREF
@XmlSchemaType(name = "IDREF")
@ManyToMany(fetch = FetchType.LAZY)
- @IndexColumn(name="sortIndex", base = 0)
+ @OrderColumn(name="sortIndex")
+ @ListIndexBase(value=0) //not really needed as this is the default
@Cascade({CascadeType.SAVE_UPDATE, CascadeType.MERGE})
private List<Media> media = new ArrayList<Media>();
@ManyToOne(fetch = FetchType.LAZY)
@Cascade({CascadeType.SAVE_UPDATE,CascadeType.MERGE})
@IndexedEmbedded
- private DescriptionBase inDescription;
+ private DescriptionBase<?> inDescription;
@XmlElement(name = "TimePeriod")
private TimePeriod timeperiod = TimePeriod.NewInstance();
import javax.persistence.Entity;
import javax.persistence.FetchType;
+import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.xml.bind.annotation.XmlAccessType;
import eu.etaxonomy.cdm.model.name.TaxonNameBase;
import eu.etaxonomy.cdm.model.taxon.Taxon;
+
/**
* The class for individual properties (also designed as character, type or
* category) of observed phenomena able to be described or measured. It also
/* for M:M see #4843 */
@ManyToMany(fetch = FetchType.LAZY)
- @JoinTable(name="DefinedTermBase_RecommendedModifierEnumeration")
+ //join columns preliminary #5369
+ @JoinTable(
+ name="DefinedTermBase_RecommendedModifierEnumeration",
+ joinColumns = @JoinColumn( name="DefinedTermBase_id")
+ )
private final Set<TermVocabulary<DefinedTerm>> recommendedModifierEnumeration = new HashSet<TermVocabulary<DefinedTerm>>();
-
@ManyToMany(fetch = FetchType.LAZY)
- @JoinTable(name="DefinedTermBase_StatisticalMeasure")
+ //join columns preliminary #5369
+ @JoinTable(
+ name="DefinedTermBase_StatisticalMeasure",
+ joinColumns = @JoinColumn( name="DefinedTermBase_id")
+ )
private final Set<StatisticalMeasure> recommendedStatisticalMeasures = new HashSet<StatisticalMeasure>();
/* for M:M see #4843 */
@ManyToMany(fetch = FetchType.LAZY)
- @JoinTable(name="DefinedTermBase_SupportedCategoricalEnumeration")
+ //join columns preliminary #5369
+ @JoinTable(
+ name="DefinedTermBase_SupportedCategoricalEnumeration",
+ joinColumns = @JoinColumn( name="DefinedTermBase_id")
+ )
private final Set<TermVocabulary<State>> supportedCategoricalEnumerations = new HashSet<TermVocabulary<State>>();
@ManyToMany(fetch = FetchType.LAZY)
- @JoinTable(name="DefinedTermBase_MeasurementUnit")
+ //join columns preliminary #5369
+ @JoinTable(
+ name="DefinedTermBase_MeasurementUnit",
+ joinColumns = @JoinColumn( name="DefinedTermBase_id")
+ )
private final Set<MeasurementUnit> recommendedMeasurementUnits = new HashSet<MeasurementUnit>();
/* ***************** CONSTRUCTOR AND FACTORY METHODS **********************************/
/* *************************************************************************************/
-
- /* (non-Javadoc)
- * @see eu.etaxonomy.cdm.model.common.DefinedTermBase#resetTerms()
- */
@Override
public void resetTerms(){
termMap = null;
/**
* Copyright (C) 2007 EDIT
-* European Distributed Institute of Taxonomy
+* European Distributed Institute of Taxonomy
* http://www.e-taxonomy.eu
-*
+*
* The contents of this file are subject to the Mozilla Public License Version 1.1
* See LICENSE.TXT at the top of this package for the full license terms.
*/
import eu.etaxonomy.cdm.model.common.Representation;
import eu.etaxonomy.cdm.model.location.NamedArea;
import eu.etaxonomy.cdm.model.media.Media;
+import eu.etaxonomy.cdm.model.occurrence.SpecimenOrObservationBase;
import eu.etaxonomy.cdm.model.taxon.Taxon;
/**
* {@link SpecimenOrObservationBase specimens or observations} (this means to assign {@link Taxon taxa} to).
* The determination process is based on the tree structure of the key and on
* the statements of its leads.
- *
- * @author m.doering
+ *
+ * @author m.doering
* @version 1.0
* @created 08-Nov-2007 13:06:28
*/
public class MediaKey extends Media implements IIdentificationKey{
private static final long serialVersionUID = -29095811051894471L;
private static final Logger logger = Logger.getLogger(MediaKey.class);
-
+
@XmlElementWrapper(name = "CoveredTaxa")
@XmlElement(name = "CoveredTaxon")
@XmlIDREF
@XmlSchemaType(name = "IDREF")
@ManyToMany(fetch = FetchType.LAZY)
- @NotNull
+ //preliminary #5369
+ //TODO should we also name the JoinTable here as for the other scopes
+ //we may want to rename it to MediaKey_CoveredTaxa/Taxon
+ @JoinTable(joinColumns = @JoinColumn( name="Media_id"))
+ @NotNull
private Set<Taxon> coveredTaxa = new HashSet<Taxon>();
-
+
@XmlElementWrapper( name = "GeographicalScope")
@XmlElement( name = "Area")
@XmlIDREF
@XmlSchemaType(name = "IDREF")
@ManyToMany(fetch = FetchType.LAZY)
- @JoinTable(name="MediaKey_NamedArea")
+ //preliminary #5369
+ @JoinTable(
+ name="MediaKey_NamedArea",
+ joinColumns = @JoinColumn( name="Media_id")
+ )
@NotNull
private Set<NamedArea> geographicalScope = new HashSet<NamedArea>();
-
+
@XmlElementWrapper(name = "TaxonomicScope")
@XmlElement(name = "Taxon")
@XmlIDREF
)
@NotNull
private Set<Taxon> taxonomicScope = new HashSet<Taxon>();
-
+
@XmlElementWrapper( name = "ScopeRestrictions")
@XmlElement( name = "Restriction")
@XmlIDREF
@XmlSchemaType(name = "IDREF")
@ManyToMany(fetch = FetchType.LAZY)
- @JoinTable(name="MediaKey_Scope")
+ //preliminary #5369
+ @JoinTable(
+ name="MediaKey_Scope",
+ joinColumns = @JoinColumn( name="Media_id"))
@NotNull
private Set<DefinedTerm> scopeRestrictions = new HashSet<DefinedTerm>();
-
+
@XmlElementWrapper( name = "KeyRepresentations")
@XmlElement( name = "KeyRepresentation")
@XmlIDREF
@Cascade({CascadeType.SAVE_UPDATE, CascadeType.MERGE,CascadeType.DELETE})
@NotNull
private Set<Representation> keyRepresentations = new HashSet<Representation>();
-
- /**
+
+ /**
* Class constructor: creates a new empty identification key instance.
*/
protected MediaKey() {
super();
}
-
- /**
+
+ /**
* Creates a new empty identification key instance.
*/
public static MediaKey NewInstance(){
return new MediaKey();
}
-
- /**
+
+ /**
* Returns the set of possible {@link Taxon taxa} corresponding to
* <i>this</i> identification key.
*/
- public Set<Taxon> getCoveredTaxa() {
+ @Override
+ public Set<Taxon> getCoveredTaxa() {
if(coveredTaxa == null) {
this.coveredTaxa = new HashSet<Taxon>();
}
return coveredTaxa;
}
/**
- * @see #getCoveredTaxa()
+ * @see #getCoveredTaxa()
*/
protected void setCoveredTaxa(Set<Taxon> coveredTaxa) {
this.coveredTaxa = coveredTaxa;
}
-
+
/**
* Adds a {@link Taxon taxa} to the set of {@link #getCoveredTaxa() covered taxa}
* corresponding to <i>this</i> identification key.
- *
+ *
* @param taxon the taxon to be added to <i>this</i> identification key
* @see #getCoveredTaxa()
*/
- public void addCoveredTaxon(Taxon taxon) {
+ @Override
+ public void addCoveredTaxon(Taxon taxon) {
this.coveredTaxa.add(taxon);
}
-
- /**
+
+ /**
* Removes one element from the set of {@link #getCoveredTaxa() covered taxa}
* corresponding to <i>this</i> identification key.
*
* @see #getCoveredTaxa()
* @see #addCoveredTaxon(Taxon)
*/
- public void removeCoveredTaxon(Taxon taxon) {
+ @Override
+ public void removeCoveredTaxon(Taxon taxon) {
this.coveredTaxa.remove(taxon);
}
- /**
+ /**
* Returns the set of {@link NamedArea named areas} indicating the geospatial
* data where <i>this</i> identification key is valid.
*/
- public Set<NamedArea> getGeographicalScope() {
+ @Override
+ public Set<NamedArea> getGeographicalScope() {
if(geographicalScope == null) {
this.geographicalScope = new HashSet<NamedArea>();
}
return geographicalScope;
}
-
+
/**
* Adds a {@link NamedArea geoScope} to the set of {@link #getGeoScopes() geogspatial scopes}
* corresponding to <i>this</i> identification key.
- *
+ *
* @param geoScope the named area to be added to <i>this</i> identification key
* @see #getGeoScopes()
*/
- public void addGeographicalScope(NamedArea geoScope) {
+ @Override
+ public void addGeographicalScope(NamedArea geoScope) {
this.geographicalScope.add(geoScope);
}
- /**
+ /**
* Removes one element from the set of {@link #getGeoScopes() geogspatial scopes}
* corresponding to <i>this</i> identification key.
*
* @see #getGeoScopes()
* @see #addGeoScope(NamedArea)
*/
- public void removeGeographicalScope(NamedArea geoScope) {
+ @Override
+ public void removeGeographicalScope(NamedArea geoScope) {
this.geographicalScope.remove(geoScope);
}
- /**
+ /**
* Returns the set of {@link Taxon taxa} that define the taxonomic
- * scope of <i>this</i> identification key
+ * scope of <i>this</i> identification key
*/
- public Set<Taxon> getTaxonomicScope() {
+ @Override
+ public Set<Taxon> getTaxonomicScope() {
if(taxonomicScope == null) {
this.taxonomicScope = new HashSet<Taxon>();
}
return taxonomicScope;
}
-
+
/**
* Adds a {@link Taxon taxa} to the set of {@link #getTaxonomicScope() taxonomic scopes}
* corresponding to <i>this</i> identification key.
- *
+ *
* @param taxon the taxon to be added to <i>this</i> identification key
* @see #getTaxonomicScope()
*/
- public void addTaxonomicScope(Taxon taxon) {
+ @Override
+ public void addTaxonomicScope(Taxon taxon) {
this.taxonomicScope.add(taxon);
}
-
- /**
+
+ /**
* Removes one element from the set of {@link #getTaxonomicScope() taxonomic scopes}
* corresponding to <i>this</i> identification key.
*
* @see #getTaxonomicScope()
* @see #addTaxonomicScope(Taxon)
*/
- public void removeTaxonomicScope(Taxon taxon) {
+ @Override
+ public void removeTaxonomicScope(Taxon taxon) {
this.taxonomicScope.remove(taxon);
}
-
- /**
+
+ /**
* Returns the set of {@link Representation key representations} corresponding to
- * <i>this</i> identification key
+ * <i>this</i> identification key
*/
public Set<Representation> getKeyRepresentations() {
if(keyRepresentations == null) {
}
return keyRepresentations;
}
-
+
/**
* Adds a {@link Representation key representation} to the set of {@link #getKeyRepresentations() key representations}
* corresponding to <i>this</i> identification key.
- *
+ *
* @param keyRepresentation the key representation to be added to <i>this</i> identification key
* @see #getKeyRepresentations()
*/
public void addKeyRepresentation(Representation keyRepresentation) {
this.keyRepresentations.add(keyRepresentation);
}
-
- /**
+
+ /**
* Removes one element from the set of {@link #getKeyRepresentations() key representations}
* corresponding to <i>this</i> identification key.
*
public void removeKeyRepresentation(Representation keyRepresentation) {
this.keyRepresentations.remove(keyRepresentation);
}
-
- /**
+
+ /**
* Returns the set of {@link Scope scope restrictions} corresponding to
- * <i>this</i> identification key
+ * <i>this</i> identification key
*/
- public Set<DefinedTerm> getScopeRestrictions() {
+ @Override
+ public Set<DefinedTerm> getScopeRestrictions() {
if(scopeRestrictions == null) {
this.scopeRestrictions = new HashSet<DefinedTerm>();
}
return scopeRestrictions;
}
-
+
/**
* Adds a {@link Scope scope restriction} to the set of {@link #getScopeRestrictions() scope restrictions}
* corresponding to <i>this</i> identification key.
- *
+ *
* @param scopeRestriction the scope restriction to be added to <i>this</i> identification key
* @see #getScopeRestrictions()
*/
- public void addScopeRestriction(DefinedTerm scopeRestriction) {
+ @Override
+ public void addScopeRestriction(DefinedTerm scopeRestriction) {
this.scopeRestrictions.add(scopeRestriction);
}
-
- /**
+
+ /**
* Removes one element from the set of {@link #getScopeRestrictions() scope restrictions}
* corresponding to <i>this</i> identification key.
*
* @see #getScopeRestrictions()
* @see #addScopeRestriction(Scope)
*/
- public void removeScopeRestriction(DefinedTerm scopeRestriction) {
+ @Override
+ public void removeScopeRestriction(DefinedTerm scopeRestriction) {
this.scopeRestrictions.remove(scopeRestriction);
}
-
+
//*********************** CLONE ********************************************************/
-
- /**
+
+ /**
* Clones <i>this</i> MediaKey. This is a shortcut that enables to create
* a new instance that differs only slightly from <i>this</i> MediaKey by
* modifying only some of the attributes.
- *
+ *
* @see eu.etaxonomy.cdm.model.media.Media#clone()
* @see java.lang.Object#clone()
*/
@Override
public Object clone() {
MediaKey result;
-
+
try{
result = (MediaKey)super.clone();
-
+
result.coveredTaxa = new HashSet<Taxon>();
for (Taxon taxon: this.coveredTaxa){
result.addCoveredTaxon(taxon);
}
-
+
result.geographicalScope = new HashSet<NamedArea>();
for (NamedArea area: this.geographicalScope){
result.addGeographicalScope(area);
}
-
+
result.keyRepresentations = new HashSet<Representation>();
for (Representation rep: this.keyRepresentations) {
result.addKeyRepresentation(rep);
}
-
+
result.scopeRestrictions = new HashSet<DefinedTerm>();
for (DefinedTerm scope: this.scopeRestrictions){
result.addScopeRestriction(scope);
}
-
+
result.taxonomicScope = new HashSet<Taxon>();
for (Taxon taxon: this.taxonomicScope){
result.addTaxonomicScope(taxon);
}
-
+
return result;
-
+
}catch (CloneNotSupportedException e) {
logger.warn("Object does not implement cloneable");
e.printStackTrace();
return null;
}
-
-
+
+
}
}
\ No newline at end of file
-// $Id$\r
-/**\r
-* Copyright (C) 2007 EDIT\r
-* European Distributed Institute of Taxonomy \r
-* http://www.e-taxonomy.eu\r
-* \r
-* The contents of this file are subject to the Mozilla Public License Version 1.1\r
-* See LICENSE.TXT at the top of this package for the full license terms.\r
-*/\r
-\r
-package eu.etaxonomy.cdm.model.description;\r
-\r
-import java.util.HashSet;\r
-import java.util.Set;\r
-\r
-import javax.persistence.Entity;\r
-import javax.persistence.FetchType;\r
-import javax.persistence.JoinColumn;\r
-import javax.persistence.JoinTable;\r
-import javax.persistence.ManyToMany;\r
-import javax.validation.constraints.NotNull;\r
-import javax.xml.bind.annotation.XmlAccessType;\r
-import javax.xml.bind.annotation.XmlAccessorType;\r
-import javax.xml.bind.annotation.XmlElement;\r
-import javax.xml.bind.annotation.XmlElementWrapper;\r
-import javax.xml.bind.annotation.XmlIDREF;\r
-import javax.xml.bind.annotation.XmlRootElement;\r
-import javax.xml.bind.annotation.XmlSchemaType;\r
-import javax.xml.bind.annotation.XmlType;\r
-\r
-import org.apache.log4j.Logger;\r
-import org.hibernate.envers.Audited;\r
-import org.hibernate.search.annotations.Indexed;\r
-\r
-import eu.etaxonomy.cdm.model.common.DefinedTerm;\r
-import eu.etaxonomy.cdm.model.location.NamedArea;\r
-import eu.etaxonomy.cdm.model.taxon.Taxon;\r
-\r
-/**\r
- * \r
- * The class representing multi-access dynamic keys used to identify\r
- * {@link SpecimenOrObservationBase specimens or observations} (this means to assign {@link Taxon taxa} to).\r
- * The determination process is performed by an identification software.\r
- * \r
- * @author h.fradin\r
- * @created 13.08.2009\r
- * @version 1.0\r
- */\r
-\r
-@XmlAccessorType(XmlAccessType.FIELD)\r
-@XmlType(name = "MultiAccessKey", propOrder = {\r
- "coveredTaxa",\r
- "taxonomicScope",\r
- "geographicalScope",\r
- "scopeRestrictions"\r
-})\r
-@XmlRootElement(name = "MultiAccessKey")\r
-@Entity\r
-@Indexed(index = "eu.etaxonomy.cdm.model.media.WorkingSet")\r
-@Audited\r
-\r
-public class MultiAccessKey extends WorkingSet implements IIdentificationKey{\r
- private static final long serialVersionUID = -240407483572972239L;\r
- @SuppressWarnings("unused")\r
- private static final Logger logger = Logger.getLogger(MultiAccessKey.class);\r
- \r
- @XmlElementWrapper(name = "CoveredTaxa")\r
- @XmlElement(name = "CoveredTaxon")\r
- @XmlIDREF\r
- @XmlSchemaType(name = "IDREF")\r
- @ManyToMany(fetch = FetchType.LAZY)\r
- @NotNull\r
- private Set<Taxon> coveredTaxa = new HashSet<Taxon>();\r
- \r
- @XmlElementWrapper(name = "TaxonomicScope")\r
- @XmlElement(name = "Taxon")\r
- @XmlIDREF\r
- @XmlSchemaType(name = "IDREF")\r
- @ManyToMany(fetch = FetchType.LAZY)\r
- @JoinTable(\r
- name="MultiAccessKey_Taxon",\r
- joinColumns=@JoinColumn(name="multiAccessKey_id"),\r
- inverseJoinColumns=@JoinColumn(name="taxon_id")\r
- )\r
- @NotNull\r
- private Set<Taxon> taxonomicScope = new HashSet<Taxon>();\r
- \r
- @XmlElementWrapper( name = "GeographicalScope")\r
- @XmlElement( name = "Area")\r
- @XmlIDREF\r
- @XmlSchemaType(name = "IDREF")\r
- @ManyToMany(fetch = FetchType.LAZY)\r
- @JoinTable(name="MultiAccessKey_NamedArea")\r
- @NotNull\r
- private Set<NamedArea> geographicalScope = new HashSet<NamedArea>();\r
- \r
- @XmlElementWrapper( name = "ScopeRestrictions")\r
- @XmlElement( name = "Restriction")\r
- @XmlIDREF\r
- @XmlSchemaType(name = "IDREF")\r
- @ManyToMany(fetch = FetchType.LAZY)\r
- @JoinTable(name="MultiAccessKey_Scope")\r
- @NotNull\r
- private Set<DefinedTerm> scopeRestrictions = new HashSet<DefinedTerm>();\r
- \r
- /** \r
- * Class constructor: creates a new empty multi-access key instance.\r
- */\r
- protected MultiAccessKey() {\r
- super();\r
- }\r
- \r
- /** \r
- * Creates a new empty identification multi-access key instance.\r
- */\r
- public static MultiAccessKey NewInstance(){\r
- return new MultiAccessKey();\r
- }\r
- \r
- /** \r
- * Returns the set of possible {@link Taxon taxa} corresponding to\r
- * <i>this</i> identification key.\r
- */\r
- public Set<Taxon> getCoveredTaxa() {\r
- if(coveredTaxa == null) {\r
- this.coveredTaxa = new HashSet<Taxon>();\r
- }\r
- return coveredTaxa;\r
- }\r
- /**\r
- * @see #getCoveredTaxa() \r
- */\r
- protected void setCoveredTaxa(Set<Taxon> coveredTaxa) {\r
- this.coveredTaxa = coveredTaxa;\r
- }\r
- \r
- /**\r
- * Adds a {@link Taxon taxa} to the set of {@link #getCoveredTaxa() covered taxa}\r
- * corresponding to <i>this</i> identification key.\r
- * \r
- * @param taxon the taxon to be added to <i>this</i> identification key\r
- * @see #getCoveredTaxa()\r
- */\r
- public void addCoveredTaxon(Taxon taxon) {\r
- this.coveredTaxa.add(taxon);\r
- }\r
- \r
- /** \r
- * Removes one element from the set of {@link #getCoveredTaxa() covered taxa}\r
- * corresponding to <i>this</i> identification key.\r
- *\r
- * @param taxon the taxon which should be removed\r
- * @see #getCoveredTaxa()\r
- * @see #addCoveredTaxon(Taxon)\r
- */\r
- public void removeCoveredTaxon(Taxon taxon) {\r
- this.coveredTaxa.remove(taxon);\r
- }\r
-\r
- /** \r
- * Returns the set of {@link NamedArea named areas} indicating the geospatial\r
- * data where <i>this</i> identification key is valid.\r
- */\r
- public Set<NamedArea> getGeographicalScope() {\r
- if(geographicalScope == null) {\r
- this.geographicalScope = new HashSet<NamedArea>();\r
- } \r
- return geographicalScope;\r
- }\r
- \r
- /**\r
- * Adds a {@link NamedArea geoScope} to the set of {@link #getGeoScopes() geogspatial scopes}\r
- * corresponding to <i>this</i> identification key.\r
- * \r
- * @param geoScope the named area to be added to <i>this</i> identification key\r
- * @see #getGeoScopes()\r
- */\r
- public void addGeographicalScope(NamedArea geoScope) {\r
- this.geographicalScope.add(geoScope);\r
- }\r
- /** \r
- * Removes one element from the set of {@link #getGeoScopes() geogspatial scopes}\r
- * corresponding to <i>this</i> identification key.\r
- *\r
- * @param geoScope the named area which should be removed\r
- * @see #getGeoScopes()\r
- * @see #addGeoScope(NamedArea)\r
- */\r
- public void removeGeographicalScope(NamedArea geoScope) {\r
- this.geographicalScope.remove(geoScope);\r
- }\r
-\r
- /** \r
- * Returns the set of {@link Taxon taxa} that define the taxonomic\r
- * scope of <i>this</i> identification key \r
- */\r
- public Set<Taxon> getTaxonomicScope() {\r
- if(taxonomicScope == null) {\r
- this.taxonomicScope = new HashSet<Taxon>();\r
- }\r
- return taxonomicScope;\r
- }\r
- \r
- /**\r
- * Adds a {@link Taxon taxa} to the set of {@link #getTaxonomicScope() taxonomic scopes}\r
- * corresponding to <i>this</i> identification key.\r
- * \r
- * @param taxon the taxon to be added to <i>this</i> identification key\r
- * @see #getTaxonomicScope()\r
- */\r
- public void addTaxonomicScope(Taxon taxon) {\r
- this.taxonomicScope.add(taxon);\r
- }\r
- \r
- /** \r
- * Removes one element from the set of {@link #getTaxonomicScope() taxonomic scopes}\r
- * corresponding to <i>this</i> identification key.\r
- *\r
- * @param taxon the taxon which should be removed\r
- * @see #getTaxonomicScope()\r
- * @see #addTaxonomicScope(Taxon)\r
- */\r
- public void removeTaxonomicScope(Taxon taxon) {\r
- this.taxonomicScope.remove(taxon);\r
- }\r
- \r
- /** \r
- * Returns the set of {@link Scope scope restrictions} corresponding to\r
- * <i>this</i> identification key \r
- */\r
- public Set<DefinedTerm> getScopeRestrictions() {\r
- if(scopeRestrictions == null) {\r
- this.scopeRestrictions = new HashSet<DefinedTerm>();\r
- }\r
- return scopeRestrictions;\r
- }\r
- \r
- /**\r
- * Adds a {@link Scope scope restriction} to the set of {@link #getScopeRestrictions() scope restrictions}\r
- * corresponding to <i>this</i> identification key.\r
- * \r
- * @param scopeRestriction the scope restriction to be added to <i>this</i> identification key\r
- * @see #getScopeRestrictions()\r
- */\r
- public void addScopeRestriction(DefinedTerm scopeRestriction) {\r
- this.scopeRestrictions.add(scopeRestriction);\r
- }\r
- \r
- /** \r
- * Removes one element from the set of {@link #getScopeRestrictions() scope restrictions}\r
- * corresponding to <i>this</i> identification key.\r
- *\r
- * @param scopeRestriction the scope restriction which should be removed\r
- * @see #getScopeRestrictions()\r
- * @see #addScopeRestriction(Scope)\r
- */\r
- public void removeScopeRestriction(DefinedTerm scopeRestriction) {\r
- this.scopeRestrictions.remove(scopeRestriction);\r
- }\r
- \r
-//*********************** CLONE ********************************************************/\r
- \r
- /** \r
- * Clones <i>this</i> MultiAccessKey. This is a shortcut that enables to create\r
- * a new instance that differs only slightly from <i>this</i> MultiAccessKey by\r
- * modifying only some of the attributes.\r
- * \r
- * @see eu.etaxonomy.cdm.model.common.AnnotatableEntity#clone()\r
- * @see java.lang.Object#clone()\r
- */\r
- @Override\r
- public Object clone() {\r
- MultiAccessKey result;\r
- \r
- result = (MultiAccessKey)super.clone();\r
- \r
- result.coveredTaxa = new HashSet<Taxon>();\r
- for (Taxon taxon: this.coveredTaxa){\r
- result.addCoveredTaxon(taxon);\r
- }\r
- \r
- result.geographicalScope = new HashSet<NamedArea>();\r
- for (NamedArea area: this.geographicalScope){\r
- result.addGeographicalScope(area);\r
- }\r
- \r
- result.scopeRestrictions = new HashSet<DefinedTerm>();\r
- for (DefinedTerm scope: this.scopeRestrictions){\r
- result.addScopeRestriction(scope);\r
- }\r
- \r
- result.taxonomicScope = new HashSet<Taxon>();\r
- for (Taxon taxon: this.taxonomicScope){\r
- result.addTaxonomicScope(taxon);\r
- }\r
- return result;\r
- \r
- }\r
-}\r
+// $Id$
+/**
+* Copyright (C) 2007 EDIT
+* European Distributed Institute of Taxonomy
+* http://www.e-taxonomy.eu
+*
+* The contents of this file are subject to the Mozilla Public License Version 1.1
+* See LICENSE.TXT at the top of this package for the full license terms.
+*/
+
+package eu.etaxonomy.cdm.model.description;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.JoinColumn;
+import javax.persistence.JoinTable;
+import javax.persistence.ManyToMany;
+import javax.validation.constraints.NotNull;
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlElementWrapper;
+import javax.xml.bind.annotation.XmlIDREF;
+import javax.xml.bind.annotation.XmlRootElement;
+import javax.xml.bind.annotation.XmlSchemaType;
+import javax.xml.bind.annotation.XmlType;
+
+import org.apache.log4j.Logger;
+import org.hibernate.envers.Audited;
+import org.hibernate.search.annotations.Indexed;
+
+import eu.etaxonomy.cdm.model.common.DefinedTerm;
+import eu.etaxonomy.cdm.model.location.NamedArea;
+import eu.etaxonomy.cdm.model.occurrence.SpecimenOrObservationBase;
+import eu.etaxonomy.cdm.model.taxon.Taxon;
+
+/**
+ *
+ * The class representing multi-access dynamic keys used to identify
+ * {@link SpecimenOrObservationBase specimens or observations} (this means to assign {@link Taxon taxa} to).
+ * The determination process is performed by an identification software.
+ *
+ * @author h.fradin
+ * @created 13.08.2009
+ * @version 1.0
+ */
+
+@XmlAccessorType(XmlAccessType.FIELD)
+@XmlType(name = "MultiAccessKey", propOrder = {
+ "coveredTaxa",
+ "taxonomicScope",
+ "geographicalScope",
+ "scopeRestrictions"
+})
+@XmlRootElement(name = "MultiAccessKey")
+@Entity
+@Indexed(index = "eu.etaxonomy.cdm.model.media.WorkingSet")
+@Audited
+
+public class MultiAccessKey extends WorkingSet implements IIdentificationKey{
+ private static final long serialVersionUID = -240407483572972239L;
+ @SuppressWarnings("unused")
+ private static final Logger logger = Logger.getLogger(MultiAccessKey.class);
+
+ @XmlElementWrapper(name = "CoveredTaxa")
+ @XmlElement(name = "CoveredTaxon")
+ @XmlIDREF
+ @XmlSchemaType(name = "IDREF")
+ @ManyToMany(fetch = FetchType.LAZY)
+ //preliminary #5369
+ //TODO should we also name the JoinTable here as for the other scopes
+ //we may want to rename it to MultiAccessKey_CoveredTaxa/Taxon
+ @JoinTable(joinColumns = @JoinColumn( name="WorkingSet_id"))
+ @NotNull
+ private Set<Taxon> coveredTaxa = new HashSet<Taxon>();
+
+ @XmlElementWrapper(name = "TaxonomicScope")
+ @XmlElement(name = "Taxon")
+ @XmlIDREF
+ @XmlSchemaType(name = "IDREF")
+ @ManyToMany(fetch = FetchType.LAZY)
+ @JoinTable(
+ name="MultiAccessKey_Taxon",
+ joinColumns=@JoinColumn(name="multiAccessKey_id"),
+ inverseJoinColumns=@JoinColumn(name="taxon_id")
+ )
+ @NotNull
+ private Set<Taxon> taxonomicScope = new HashSet<Taxon>();
+
+ @XmlElementWrapper( name = "GeographicalScope")
+ @XmlElement( name = "Area")
+ @XmlIDREF
+ @XmlSchemaType(name = "IDREF")
+ @ManyToMany(fetch = FetchType.LAZY)
+ //preliminary #5369
+ @JoinTable(name="MultiAccessKey_NamedArea",
+ joinColumns = @JoinColumn( name="WorkingSet_id")
+ )
+ @NotNull
+ private Set<NamedArea> geographicalScope = new HashSet<NamedArea>();
+
+ @XmlElementWrapper( name = "ScopeRestrictions")
+ @XmlElement( name = "Restriction")
+ @XmlIDREF
+ @XmlSchemaType(name = "IDREF")
+ @ManyToMany(fetch = FetchType.LAZY)
+ //preliminary #5369
+ @JoinTable(name="MultiAccessKey_Scope",
+ joinColumns = @JoinColumn( name="WorkingSet_id")
+ )
+ @NotNull
+ private Set<DefinedTerm> scopeRestrictions = new HashSet<DefinedTerm>();
+
+ /**
+ * Class constructor: creates a new empty multi-access key instance.
+ */
+ protected MultiAccessKey() {
+ super();
+ }
+
+ /**
+ * Creates a new empty identification multi-access key instance.
+ */
+ public static MultiAccessKey NewInstance(){
+ return new MultiAccessKey();
+ }
+
+ /**
+ * Returns the set of possible {@link Taxon taxa} corresponding to
+ * <i>this</i> identification key.
+ */
+ @Override
+ public Set<Taxon> getCoveredTaxa() {
+ if(coveredTaxa == null) {
+ this.coveredTaxa = new HashSet<Taxon>();
+ }
+ return coveredTaxa;
+ }
+ /**
+ * @see #getCoveredTaxa()
+ */
+ protected void setCoveredTaxa(Set<Taxon> coveredTaxa) {
+ this.coveredTaxa = coveredTaxa;
+ }
+
+ /**
+ * Adds a {@link Taxon taxa} to the set of {@link #getCoveredTaxa() covered taxa}
+ * corresponding to <i>this</i> identification key.
+ *
+ * @param taxon the taxon to be added to <i>this</i> identification key
+ * @see #getCoveredTaxa()
+ */
+ @Override
+ public void addCoveredTaxon(Taxon taxon) {
+ this.coveredTaxa.add(taxon);
+ }
+
+ /**
+ * Removes one element from the set of {@link #getCoveredTaxa() covered taxa}
+ * corresponding to <i>this</i> identification key.
+ *
+ * @param taxon the taxon which should be removed
+ * @see #getCoveredTaxa()
+ * @see #addCoveredTaxon(Taxon)
+ */
+ @Override
+ public void removeCoveredTaxon(Taxon taxon) {
+ this.coveredTaxa.remove(taxon);
+ }
+
+ /**
+ * Returns the set of {@link NamedArea named areas} indicating the geospatial
+ * data where <i>this</i> identification key is valid.
+ */
+ @Override
+ public Set<NamedArea> getGeographicalScope() {
+ if(geographicalScope == null) {
+ this.geographicalScope = new HashSet<NamedArea>();
+ }
+ return geographicalScope;
+ }
+
+ /**
+ * Adds a {@link NamedArea geoScope} to the set of {@link #getGeographicalScope() geospatial scopes}
+ * corresponding to <i>this</i> identification key.
+ *
+ * @param geoScope the named area to be added to <i>this</i> identification key
+ * @see #getGeographicalScope()
+ */
+ @Override
+ public void addGeographicalScope(NamedArea geoScope) {
+ this.geographicalScope.add(geoScope);
+ }
+ /**
+ * Removes one element from the set of {@link #getGeographicalScope() geospatial scopes}
+ * corresponding to <i>this</i> identification key.
+ *
+ * @param geoScope the named area which should be removed
+ * @see #getGeographicalScope()
+ * @see #addGeographicalScope(NamedArea)
+ */
+ @Override
+ public void removeGeographicalScope(NamedArea geoScope) {
+ this.geographicalScope.remove(geoScope);
+ }
+
+ /**
+ * Returns the set of {@link Taxon taxa} that define the taxonomic
+ * scope of <i>this</i> identification key
+ */
+ @Override
+ public Set<Taxon> getTaxonomicScope() {
+ if(taxonomicScope == null) {
+ this.taxonomicScope = new HashSet<Taxon>();
+ }
+ return taxonomicScope;
+ }
+
+ /**
+ * Adds a {@link Taxon taxa} to the set of {@link #getTaxonomicScope() taxonomic scopes}
+ * corresponding to <i>this</i> identification key.
+ *
+ * @param taxon the taxon to be added to <i>this</i> identification key
+ * @see #getTaxonomicScope()
+ */
+ @Override
+ public void addTaxonomicScope(Taxon taxon) {
+ this.taxonomicScope.add(taxon);
+ }
+
+ /**
+ * Removes one element from the set of {@link #getTaxonomicScope() taxonomic scopes}
+ * corresponding to <i>this</i> identification key.
+ *
+ * @param taxon the taxon which should be removed
+ * @see #getTaxonomicScope()
+ * @see #addTaxonomicScope(Taxon)
+ */
+ @Override
+ public void removeTaxonomicScope(Taxon taxon) {
+ this.taxonomicScope.remove(taxon);
+ }
+
+ /**
+ * Returns the set of {@link Scope scope restrictions} corresponding to
+ * <i>this</i> identification key
+ */
+ @Override
+ public Set<DefinedTerm> getScopeRestrictions() {
+ if(scopeRestrictions == null) {
+ this.scopeRestrictions = new HashSet<DefinedTerm>();
+ }
+ return scopeRestrictions;
+ }
+
+ /**
+ * Adds a {@link Scope scope restriction} to the set of {@link #getScopeRestrictions() scope restrictions}
+ * corresponding to <i>this</i> identification key.
+ *
+ * @param scopeRestriction the scope restriction to be added to <i>this</i> identification key
+ * @see #getScopeRestrictions()
+ */
+ @Override
+ public void addScopeRestriction(DefinedTerm scopeRestriction) {
+ this.scopeRestrictions.add(scopeRestriction);
+ }
+
+ /**
+ * Removes one element from the set of {@link #getScopeRestrictions() scope restrictions}
+ * corresponding to <i>this</i> identification key.
+ *
+ * @param scopeRestriction the scope restriction which should be removed
+ * @see #getScopeRestrictions()
+ * @see #addScopeRestriction(Scope)
+ */
+ @Override
+ public void removeScopeRestriction(DefinedTerm scopeRestriction) {
+ this.scopeRestrictions.remove(scopeRestriction);
+ }
+
+//*********************** CLONE ********************************************************/
+
+ /**
+ * Clones <i>this</i> MultiAccessKey. This is a shortcut that enables to create
+ * a new instance that differs only slightly from <i>this</i> MultiAccessKey by
+ * modifying only some of the attributes.
+ *
+ * @see eu.etaxonomy.cdm.model.common.AnnotatableEntity#clone()
+ * @see java.lang.Object#clone()
+ */
+ @Override
+ public Object clone() {
+ MultiAccessKey result;
+
+ result = (MultiAccessKey)super.clone();
+
+ result.coveredTaxa = new HashSet<Taxon>();
+ for (Taxon taxon: this.coveredTaxa){
+ result.addCoveredTaxon(taxon);
+ }
+
+ result.geographicalScope = new HashSet<NamedArea>();
+ for (NamedArea area: this.geographicalScope){
+ result.addGeographicalScope(area);
+ }
+
+ result.scopeRestrictions = new HashSet<DefinedTerm>();
+ for (DefinedTerm scope: this.scopeRestrictions){
+ result.addScopeRestriction(scope);
+ }
+
+ result.taxonomicScope = new HashSet<Taxon>();
+ for (Taxon taxon: this.taxonomicScope){
+ result.addTaxonomicScope(taxon);
+ }
+ return result;
+
+ }
+}
-// $Id$\r
-/**\r
- * Copyright (C) 2007 EDIT\r
- * European Distributed Institute of Taxonomy\r
- * http://www.e-taxonomy.eu\r
- *\r
- * The contents of this file are subject to the Mozilla Public License Version 1.1\r
- * See LICENSE.TXT at the top of this package for the full license terms.\r
- */\r
-\r
-package eu.etaxonomy.cdm.model.description;\r
-\r
-import java.io.PrintStream;\r
-import java.util.HashSet;\r
-import java.util.Set;\r
-\r
-import javax.persistence.Entity;\r
-import javax.persistence.FetchType;\r
-import javax.persistence.JoinColumn;\r
-import javax.persistence.JoinTable;\r
-import javax.persistence.ManyToMany;\r
-import javax.persistence.OneToOne;\r
-import javax.validation.constraints.NotNull;\r
-import javax.xml.bind.annotation.XmlAccessType;\r
-import javax.xml.bind.annotation.XmlAccessorType;\r
-import javax.xml.bind.annotation.XmlElement;\r
-import javax.xml.bind.annotation.XmlElementWrapper;\r
-import javax.xml.bind.annotation.XmlIDREF;\r
-import javax.xml.bind.annotation.XmlRootElement;\r
-import javax.xml.bind.annotation.XmlSchemaType;\r
-import javax.xml.bind.annotation.XmlType;\r
-\r
-import org.apache.commons.lang.StringUtils;\r
-import org.apache.log4j.Logger;\r
-import org.hibernate.annotations.Cascade;\r
-import org.hibernate.annotations.CascadeType;\r
-import org.hibernate.envers.Audited;\r
-import org.hibernate.search.annotations.Indexed;\r
-\r
-import eu.etaxonomy.cdm.common.CdmUtils;\r
-import eu.etaxonomy.cdm.model.common.DefinedTerm;\r
-import eu.etaxonomy.cdm.model.common.IdentifiableEntity;\r
-import eu.etaxonomy.cdm.model.common.Language;\r
-import eu.etaxonomy.cdm.model.location.NamedArea;\r
-import eu.etaxonomy.cdm.model.occurrence.SpecimenOrObservationBase;\r
-import eu.etaxonomy.cdm.model.taxon.Taxon;\r
-import eu.etaxonomy.cdm.strategy.cache.description.PolytomousKeyDefaultCacheStrategy;\r
-import eu.etaxonomy.cdm.strategy.generate.PolytomousKeyGenerator;\r
-\r
-/**\r
- * This class represents a fixed single-access key (dichotomous or\r
- * polytomous) used to identify (assign a {@link Taxon taxon} to) a {@link SpecimenOrObservationBase\r
- * specimen or observation}. The key may be written manually or may be generated automatically\r
- * e.g. by the {@link PolytomousKeyGenerator}. The different paths to the taxa are expressed\r
- * by a decision graph consisting of {@link PolytomousKeyNode\r
- * PolytomousKeyNodes}. The root node of such graph is accessible by\r
- * {@link #getRoot()}. Refer to {@link PolytomousKeyNode} for detailed\r
- * documentation on the decision graph structure.\r
- *\r
- * @author h.fradin\r
- * @created 13.08.2009\r
- *\r
- * @author a.mueller\r
- * @version 2.0 (08.11.2010)\r
- */\r
-@XmlAccessorType(XmlAccessType.FIELD)\r
-@XmlType(name = "PolytomousKey", propOrder = {\r
- "coveredTaxa",\r
- "taxonomicScope",\r
- "geographicalScope",\r
- "scopeRestrictions",\r
- "root",\r
- "startNumber"})\r
-@XmlRootElement(name = "PolytomousKey")\r
-@Entity\r
-@Indexed(index = "eu.etaxonomy.cdm.model.description.PolytomousKey")\r
-@Audited\r
-public class PolytomousKey extends IdentifiableEntity<PolytomousKeyDefaultCacheStrategy> implements IIdentificationKey {\r
- private static final long serialVersionUID = -3368243754557343942L;\r
- private static final Logger logger = Logger.getLogger(PolytomousKey.class);\r
-\r
- @XmlElementWrapper(name = "CoveredTaxa")\r
- @XmlElement(name = "CoveredTaxon")\r
- @XmlIDREF\r
- @XmlSchemaType(name = "IDREF")\r
- @ManyToMany(fetch = FetchType.LAZY)\r
- @NotNull\r
- @Cascade({CascadeType.SAVE_UPDATE, CascadeType.MERGE})\r
- private Set<Taxon> coveredTaxa = new HashSet<Taxon>();\r
-\r
- @XmlElementWrapper(name = "TaxonomicScope")\r
- @XmlElement(name = "Taxon")\r
- @XmlIDREF\r
- @XmlSchemaType(name = "IDREF")\r
- @ManyToMany(fetch = FetchType.LAZY)\r
- @JoinTable(name = "PolytomousKey_Taxon", joinColumns = @JoinColumn(name = "polytomousKey_id"), inverseJoinColumns = @JoinColumn(name = "taxon_id"))\r
- @NotNull\r
- @Cascade({CascadeType.SAVE_UPDATE, CascadeType.MERGE})\r
- private Set<Taxon> taxonomicScope = new HashSet<Taxon>();\r
-\r
- @XmlElementWrapper(name = "GeographicalScope")\r
- @XmlElement(name = "Area")\r
- @XmlIDREF\r
- @XmlSchemaType(name = "IDREF")\r
- @ManyToMany(fetch = FetchType.LAZY)\r
- @JoinTable(name = "PolytomousKey_NamedArea")\r
- @NotNull\r
- @Cascade({CascadeType.MERGE})\r
- private Set<NamedArea> geographicalScope = new HashSet<NamedArea>();\r
-\r
- @XmlElementWrapper(name = "ScopeRestrictions")\r
- @XmlElement(name = "Restriction")\r
- @XmlIDREF\r
- @XmlSchemaType(name = "IDREF")\r
- @ManyToMany(fetch = FetchType.LAZY)\r
- @JoinTable(name = "PolytomousKey_Scope")\r
- @NotNull\r
- @Cascade({CascadeType.MERGE})\r
- private Set<DefinedTerm> scopeRestrictions = new HashSet<DefinedTerm>();\r
-\r
- @XmlElement(name = "Root")\r
- @OneToOne(fetch = FetchType.LAZY)\r
- @Cascade({ CascadeType.SAVE_UPDATE, CascadeType.MERGE })\r
- private PolytomousKeyNode root;\r
-\r
- @XmlElement(name = "StartNumber")\r
- @Audited\r
- private int startNumber = 1;\r
-\r
-\r
-// ***************** STATIC METHODS ********************************/\r
-\r
- /**\r
- * Creates a new empty identification multi-access key instance.\r
- */\r
- public static PolytomousKey NewInstance() {\r
- return new PolytomousKey();\r
- }\r
-\r
- /**\r
- * Creates a new empty identification polytomous key instance.\r
- */\r
- public static PolytomousKey NewTitledInstance(String title) {\r
- PolytomousKey result = new PolytomousKey();\r
- result.setTitleCache(title, true);\r
- return result;\r
- }\r
-\r
-// ************************** CONSTRUCTOR ************************/\r
-\r
- /**\r
- * Class constructor: creates a new empty multi-access key instance.\r
- */\r
- protected PolytomousKey() {\r
- super();\r
- root = PolytomousKeyNode.NewInstance();\r
- root.setNodeNumber(getStartNumber());\r
- root.setKey(this);\r
- this.cacheStrategy = PolytomousKeyDefaultCacheStrategy.NewInstance();\r
- }\r
-\r
- // ************************ GETTER/ SETTER\r
-\r
- /**\r
- * Returns the topmost {@link PolytomousKeyNode polytomous key node} (root\r
- * node) of <i>this</i> polytomous key. The root node does not have any\r
- * parent. Since polytomous key nodes recursively point to their child nodes\r
- * the complete polytomous key is defined by its root node.\r
- */\r
- public PolytomousKeyNode getRoot() {\r
- return root;\r
- }\r
-\r
- /**\r
- * This method should be used by Hibernate only. If we want to make this\r
- * method public we have to think about biderionality and also what should\r
- * happen with the old root node.\r
- *\r
- * @see #getRoot()\r
- */\r
- public void setRoot(PolytomousKeyNode root) {\r
- this.root = root;\r
- if (root != null){\r
- root.setKey(this);\r
- }\r
- }\r
-\r
- /**\r
- * Returns the set of possible {@link Taxon taxa} corresponding to\r
- * <i>this</i> identification key.\r
- */\r
- @Override\r
- public Set<Taxon> getCoveredTaxa() {\r
- if (coveredTaxa == null) {\r
- this.coveredTaxa = new HashSet<Taxon>();\r
- }\r
- return coveredTaxa;\r
- }\r
-\r
- /**\r
- * @see #getCoveredTaxa()\r
- */\r
- protected void setCoveredTaxa(Set<Taxon> coveredTaxa) {\r
- this.coveredTaxa = coveredTaxa;\r
- }\r
-\r
- /**\r
- * Adds a {@link Taxon taxa} to the set of {@link #getCoveredTaxa() covered\r
- * taxa} corresponding to <i>this</i> identification key.\r
- *\r
- * @param taxon\r
- * the taxon to be added to <i>this</i> identification key\r
- * @see #getCoveredTaxa()\r
- */\r
- @Override\r
- public void addCoveredTaxon(Taxon taxon) {\r
- this.coveredTaxa.add(taxon);\r
- }\r
-\r
- /**\r
- * Removes one element from the set of {@link #getCoveredTaxa() covered\r
- * taxa} corresponding to <i>this</i> identification key.\r
- *\r
- * @param taxon\r
- * the taxon which should be removed\r
- * @see #getCoveredTaxa()\r
- * @see #addCoveredTaxon(Taxon)\r
- */\r
- @Override\r
- public void removeCoveredTaxon(Taxon taxon) {\r
- this.coveredTaxa.remove(taxon);\r
- }\r
-\r
- /**\r
- * Returns the set of {@link NamedArea named areas} indicating the\r
- * geospatial data where <i>this</i> identification key is valid.\r
- */\r
- @Override\r
- public Set<NamedArea> getGeographicalScope() {\r
- if (geographicalScope == null) {\r
- this.geographicalScope = new HashSet<NamedArea>();\r
- }\r
- return geographicalScope;\r
- }\r
-\r
- /**\r
- * Adds a {@link NamedArea geoScope} to the set of {@link #getGeoScopes()\r
- * geogspatial scopes} corresponding to <i>this</i> identification key.\r
- *\r
- * @param geoScope\r
- * the named area to be added to <i>this</i> identification key\r
- * @see #getGeoScopes()\r
- */\r
- @Override\r
- public void addGeographicalScope(NamedArea geoScope) {\r
- this.geographicalScope.add(geoScope);\r
- }\r
-\r
- /**\r
- * Removes one element from the set of {@link #getGeoScopes() geogspatial\r
- * scopes} corresponding to <i>this</i> identification key.\r
- *\r
- * @param geoScope\r
- * the named area which should be removed\r
- * @see #getGeoScopes()\r
- * @see #addGeoScope(NamedArea)\r
- */\r
- @Override\r
- public void removeGeographicalScope(NamedArea geoScope) {\r
- this.geographicalScope.remove(geoScope);\r
- }\r
-\r
- /**\r
- * Returns the set of {@link Taxon taxa} that define the taxonomic scope of\r
- * <i>this</i> identification key\r
- */\r
- @Override\r
- public Set<Taxon> getTaxonomicScope() {\r
- if (taxonomicScope == null) {\r
- this.taxonomicScope = new HashSet<Taxon>();\r
- }\r
- return taxonomicScope;\r
- }\r
-\r
- /**\r
- * Adds a {@link Taxon taxa} to the set of {@link #getTaxonomicScope()\r
- * taxonomic scopes} corresponding to <i>this</i> identification key.\r
- *\r
- * @param taxon\r
- * the taxon to be added to <i>this</i> identification key\r
- * @see #getTaxonomicScope()\r
- */\r
- @Override\r
- public void addTaxonomicScope(Taxon taxon) {\r
- this.taxonomicScope.add(taxon);\r
- }\r
-\r
- /**\r
- * Removes one element from the set of {@link #getTaxonomicScope() taxonomic\r
- * scopes} corresponding to <i>this</i> identification key.\r
- *\r
- * @param taxon\r
- * the taxon which should be removed\r
- * @see #getTaxonomicScope()\r
- * @see #addTaxonomicScope(Taxon)\r
- */\r
- @Override\r
- public void removeTaxonomicScope(Taxon taxon) {\r
- this.taxonomicScope.remove(taxon);\r
- }\r
-\r
- /**\r
- * Returns the set of {@link Scope scope restrictions} corresponding to\r
- * <i>this</i> identification key\r
- */\r
- @Override\r
- public Set<DefinedTerm> getScopeRestrictions() {\r
- if (scopeRestrictions == null) {\r
- this.scopeRestrictions = new HashSet<DefinedTerm>();\r
- }\r
- return scopeRestrictions;\r
- }\r
-\r
- /**\r
- * Adds a {@link Scope scope restriction} to the set of\r
- * {@link #getScopeRestrictions() scope restrictions} corresponding to\r
- * <i>this</i> identification key.\r
- *\r
- * @param scopeRestriction\r
- * the scope restriction to be added to <i>this</i>\r
- * identification key\r
- * @see #getScopeRestrictions()\r
- */\r
- @Override\r
- public void addScopeRestriction(DefinedTerm scopeRestriction) {\r
- this.scopeRestrictions.add(scopeRestriction);\r
- }\r
-\r
- /**\r
- * Removes one element from the set of {@link #getScopeRestrictions() scope\r
- * restrictions} corresponding to <i>this</i> identification key.\r
- *\r
- * @param scopeRestriction\r
- * the scope restriction which should be removed\r
- * @see #getScopeRestrictions()\r
- * @see #addScopeRestriction(Scope)\r
- */\r
- @Override\r
- public void removeScopeRestriction(DefinedTerm scopeRestriction) {\r
- this.scopeRestrictions.remove(scopeRestriction);\r
- }\r
-\r
-\r
- /**\r
- * The first number for the automated numbering of {@link PolytomousKeyNode key nodes}.\r
- * Default value is 1.\r
- * @return\r
- */\r
- public int getStartNumber() {\r
- return startNumber;\r
- }\r
-\r
- /**\r
- * @see #getStartNumber()\r
- * @param startNumber\r
- */\r
- public void setStartNumber(int startNumber) {\r
- this.startNumber = startNumber;\r
- }\r
-\r
- // ******************** toString *****************************************/\r
-\r
- private class IntegerObject {\r
- int number = 0;\r
-\r
- int inc() {\r
- return number++;\r
- };\r
-\r
- @Override\r
- public String toString() {\r
- return String.valueOf(number);\r
- }\r
- }\r
-\r
- public String print(PrintStream stream) {\r
- String title = this.getTitleCache() + "\n";\r
- String strPrint = title;\r
-\r
- if (stream != null) {\r
- stream.print(title);\r
- }\r
-\r
- PolytomousKeyNode root = this.getRoot();\r
- strPrint += printNode(root, null, " ", stream);\r
- return strPrint;\r
- }\r
-\r
- /**\r
- * TODO this is a preliminary implementation\r
- *\r
- * @param node\r
- * @param identation\r
- * @param no\r
- * @param myNumber\r
- * @param stream\r
- * @return\r
- */\r
- private String printNode(PolytomousKeyNode node, PolytomousKeyNode parent2,\r
- String identation, PrintStream stream) {\r
- String separator = ", ";\r
-\r
- String result = identation + node.getNodeNumber() + ". ";\r
- if (node != null) {\r
- // key choice\r
- String question = null;\r
- String feature = null;\r
- if (node.getQuestion() != null) {\r
- question = node.getQuestion().getLabelText(Language.DEFAULT());\r
- }\r
- if (node.getFeature() != null) {\r
- feature = node.getFeature().getLabel(Language.DEFAULT());\r
- }\r
- result += CdmUtils.concat(" - ", question, feature) + "\n";\r
- ;\r
-\r
- // Leads\r
- char nextCounter = 'a';\r
- for (PolytomousKeyNode child : node.getChildren()) {\r
- String leadNumber = String.valueOf(nextCounter++);\r
- if (child.getStatement() != null) {\r
- String statement = child.getStatement().getLabelText(\r
- Language.DEFAULT());\r
- result += identation + " " + leadNumber + ") "\r
- + (statement == null ? "" : (statement));\r
- result += " ... ";\r
- // child node\r
- if (!child.isLeaf()) {\r
- result += child.getNodeNumber() + separator;\r
- }\r
- // taxon\r
- if (child.getTaxon() != null) {\r
- String strTaxon = "";\r
- if (child.getTaxon().getName() != null) {\r
- strTaxon = child.getTaxon().getName()\r
- .getTitleCache();\r
- } else {\r
- strTaxon = child.getTaxon().getTitleCache();\r
- }\r
- result += strTaxon + separator;\r
- }\r
- // subkey\r
- if (child.getSubkey() != null) {\r
- String subkey = child.getSubkey().getTitleCache();\r
- result += subkey + separator;\r
- }\r
- // other node\r
- if (child.getOtherNode() != null) {\r
- PolytomousKeyNode otherNode = child.getOtherNode();\r
- String otherNodeString = null;\r
- if (child.getKey().equals(otherNode.getKey())) {\r
- otherNodeString = String.valueOf(otherNode\r
- .getNodeNumber());\r
- } else {\r
- otherNodeString = otherNode.getKey() + " "\r
- + otherNode.getNodeNumber();\r
- }\r
- result += otherNodeString + separator;\r
- }\r
-\r
- result = StringUtils.chomp(result, separator);\r
- result += "\n";\r
- }\r
- }\r
-\r
- if (stream != null) {\r
- stream.print(result);\r
- }\r
- for (PolytomousKeyNode child : node.getChildren()) {\r
- if (!child.isLeaf()) {\r
- result += printNode(child, node, identation + "", stream);\r
- }\r
- }\r
- }\r
- return result;\r
- }\r
-\r
- //\r
- // public List<PolytomousKeyNode> getChildren() {\r
- // return getRoot().getChildren();\r
- // }\r
-\r
- // *********************** CLONE ************************************/\r
-\r
- /**\r
- * Clones <i>this</i> PolytomousKey. This is a shortcut that enables to\r
- * create a new instance that differs only slightly from <i>this</i>\r
- * PolytomousKey by modifying only some of the attributes.\r
- *\r
- * @see eu.etaxonomy.cdm.model.common.IdentifiableEntity#clone()\r
- * @see java.lang.Object#clone()\r
- */\r
- @Override\r
- public Object clone() {\r
- PolytomousKey result;\r
-\r
- try {\r
- result = (PolytomousKey) super.clone();\r
-\r
- result.coveredTaxa = new HashSet<Taxon>();\r
- for (Taxon taxon : this.coveredTaxa) {\r
- result.addCoveredTaxon(taxon);\r
- }\r
-\r
- result.geographicalScope = new HashSet<NamedArea>();\r
- for (NamedArea area : this.geographicalScope) {\r
- result.addGeographicalScope(area);\r
- }\r
-\r
- result.root = (PolytomousKeyNode) this.root.clone();\r
-\r
- result.scopeRestrictions = new HashSet<DefinedTerm>();\r
- for (DefinedTerm scope : this.scopeRestrictions) {\r
- result.addScopeRestriction(scope);\r
- }\r
-\r
- result.taxonomicScope = new HashSet<Taxon>();\r
- for (Taxon taxon : this.taxonomicScope) {\r
- result.addTaxonomicScope(taxon);\r
- }\r
-\r
- return result;\r
-\r
- } catch (CloneNotSupportedException e) {\r
- logger.warn("Object does not implement cloneable");\r
- e.printStackTrace();\r
- return null;\r
- }\r
-\r
- }\r
-\r
-}\r
+// $Id$
+/**
+ * Copyright (C) 2007 EDIT
+ * European Distributed Institute of Taxonomy
+ * http://www.e-taxonomy.eu
+ *
+ * The contents of this file are subject to the Mozilla Public License Version 1.1
+ * See LICENSE.TXT at the top of this package for the full license terms.
+ */
+
+package eu.etaxonomy.cdm.model.description;
+
+import java.io.PrintStream;
+import java.util.HashSet;
+import java.util.Set;
+
+import javax.persistence.Entity;
+import javax.persistence.FetchType;
+import javax.persistence.JoinColumn;
+import javax.persistence.JoinTable;
+import javax.persistence.ManyToMany;
+import javax.persistence.OneToOne;
+import javax.validation.constraints.NotNull;
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlElementWrapper;
+import javax.xml.bind.annotation.XmlIDREF;
+import javax.xml.bind.annotation.XmlRootElement;
+import javax.xml.bind.annotation.XmlSchemaType;
+import javax.xml.bind.annotation.XmlType;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
+import org.hibernate.annotations.Cascade;
+import org.hibernate.annotations.CascadeType;
+import org.hibernate.envers.Audited;
+import org.hibernate.search.annotations.Indexed;
+
+import eu.etaxonomy.cdm.common.CdmUtils;
+import eu.etaxonomy.cdm.model.common.DefinedTerm;
+import eu.etaxonomy.cdm.model.common.IdentifiableEntity;
+import eu.etaxonomy.cdm.model.common.Language;
+import eu.etaxonomy.cdm.model.location.NamedArea;
+import eu.etaxonomy.cdm.model.occurrence.SpecimenOrObservationBase;
+import eu.etaxonomy.cdm.model.taxon.Taxon;
+import eu.etaxonomy.cdm.strategy.cache.description.PolytomousKeyDefaultCacheStrategy;
+import eu.etaxonomy.cdm.strategy.generate.PolytomousKeyGenerator;
+
+/**
+ * This class represents a fixed single-access key (dichotomous or
+ * polytomous) used to identify (assign a {@link Taxon taxon} to) a {@link SpecimenOrObservationBase
+ * specimen or observation}. The key may be written manually or may be generated automatically
+ * e.g. by the {@link PolytomousKeyGenerator}. The different paths to the taxa are expressed
+ * by a decision graph consisting of {@link PolytomousKeyNode
+ * PolytomousKeyNodes}. The root node of such graph is accessible by
+ * {@link #getRoot()}. Refer to {@link PolytomousKeyNode} for detailed
+ * documentation on the decision graph structure.
+ *
+ * @author h.fradin
+ * @created 13.08.2009
+ *
+ * @author a.mueller
+ * @version 2.0 (08.11.2010)
+ */
+@XmlAccessorType(XmlAccessType.FIELD)
+@XmlType(name = "PolytomousKey", propOrder = {
+ "coveredTaxa",
+ "taxonomicScope",
+ "geographicalScope",
+ "scopeRestrictions",
+ "root",
+ "startNumber"})
+@XmlRootElement(name = "PolytomousKey")
+@Entity
+@Indexed(index = "eu.etaxonomy.cdm.model.description.PolytomousKey")
+@Audited
+public class PolytomousKey extends IdentifiableEntity<PolytomousKeyDefaultCacheStrategy> implements IIdentificationKey {
+ private static final long serialVersionUID = -3368243754557343942L;
+ private static final Logger logger = Logger.getLogger(PolytomousKey.class);
+
+ @XmlElementWrapper(name = "CoveredTaxa")
+ @XmlElement(name = "CoveredTaxon")
+ @XmlIDREF
+ @XmlSchemaType(name = "IDREF")
+ @ManyToMany(fetch = FetchType.LAZY)
+ @NotNull
+ @Cascade({CascadeType.SAVE_UPDATE, CascadeType.MERGE})
+ private Set<Taxon> coveredTaxa = new HashSet<Taxon>();
+
+ @XmlElementWrapper(name = "TaxonomicScope")
+ @XmlElement(name = "Taxon")
+ @XmlIDREF
+ @XmlSchemaType(name = "IDREF")
+ @ManyToMany(fetch = FetchType.LAZY)
+ @JoinTable(name = "PolytomousKey_Taxon",
+ joinColumns = @JoinColumn(name = "polytomousKey_id"),
+ inverseJoinColumns = @JoinColumn(name = "taxon_id")
+ )
+ @NotNull
+ @Cascade({CascadeType.SAVE_UPDATE, CascadeType.MERGE})
+ private Set<Taxon> taxonomicScope = new HashSet<Taxon>();
+
+ @XmlElementWrapper(name = "GeographicalScope")
+ @XmlElement(name = "Area")
+ @XmlIDREF
+ @XmlSchemaType(name = "IDREF")
+ @ManyToMany(fetch = FetchType.LAZY)
+ @JoinTable(name = "PolytomousKey_NamedArea")
+ @NotNull
+ @Cascade({CascadeType.MERGE})
+ private Set<NamedArea> geographicalScope = new HashSet<NamedArea>();
+
+ @XmlElementWrapper(name = "ScopeRestrictions")
+ @XmlElement(name = "Restriction")
+ @XmlIDREF
+ @XmlSchemaType(name = "IDREF")
+ @ManyToMany(fetch = FetchType.LAZY)
+ @JoinTable(name = "PolytomousKey_Scope")
+ @NotNull
+ @Cascade({CascadeType.MERGE})
+ private Set<DefinedTerm> scopeRestrictions = new HashSet<DefinedTerm>();
+
+ @XmlElement(name = "Root")
+ @OneToOne(fetch = FetchType.LAZY)
+ @Cascade({ CascadeType.SAVE_UPDATE, CascadeType.MERGE })
+ private PolytomousKeyNode root;
+
+ @XmlElement(name = "StartNumber")
+ @Audited
+ private int startNumber = 1;
+
+
+// ***************** STATIC METHODS ********************************/
+
+ /**
+ * Creates a new empty identification polytomous key instance.
+ */
+ public static PolytomousKey NewInstance() {
+ return new PolytomousKey();
+ }
+
+ /**
+ * Creates a new empty identification polytomous key instance.
+ */
+ public static PolytomousKey NewTitledInstance(String title) {
+ PolytomousKey result = new PolytomousKey();
+ result.setTitleCache(title, true);
+ return result;
+ }
+
+// ************************** CONSTRUCTOR ************************/
+
+ /**
+ * Class constructor: creates a new empty polytomous key instance.
+ */
+ protected PolytomousKey() {
+ super();
+ root = PolytomousKeyNode.NewInstance();
+ root.setNodeNumber(getStartNumber());
+ root.setKey(this);
+ this.cacheStrategy = PolytomousKeyDefaultCacheStrategy.NewInstance();
+ }
+
+ // ************************ GETTER/ SETTER
+
+ /**
+ * Returns the topmost {@link PolytomousKeyNode polytomous key node} (root
+ * node) of <i>this</i> polytomous key. The root node does not have any
+ * parent. Since polytomous key nodes recursively point to their child nodes
+ * the complete polytomous key is defined by its root node.
+ */
+ public PolytomousKeyNode getRoot() {
+ return root;
+ }
+
+ /**
+ * This method should be used by Hibernate only. If we want to make this
+ * method public we have to think about bidirectionality and also what should
+ * happen with the old root node.
+ *
+ * @see #getRoot()
+ */
+ public void setRoot(PolytomousKeyNode root) {
+ this.root = root;
+ if (root != null){
+ root.setKey(this);
+ }
+ }
+
+ /**
+ * Returns the set of possible {@link Taxon taxa} corresponding to
+ * <i>this</i> identification key.
+ */
+ @Override
+ public Set<Taxon> getCoveredTaxa() {
+ if (coveredTaxa == null) {
+ this.coveredTaxa = new HashSet<Taxon>();
+ }
+ return coveredTaxa;
+ }
+
+ /**
+ * @see #getCoveredTaxa()
+ */
+ protected void setCoveredTaxa(Set<Taxon> coveredTaxa) {
+ this.coveredTaxa = coveredTaxa;
+ }
+
+ /**
+ * Adds a {@link Taxon taxa} to the set of {@link #getCoveredTaxa() covered
+ * taxa} corresponding to <i>this</i> identification key.
+ *
+ * @param taxon
+ * the taxon to be added to <i>this</i> identification key
+ * @see #getCoveredTaxa()
+ */
+ @Override
+ public void addCoveredTaxon(Taxon taxon) {
+ this.coveredTaxa.add(taxon);
+ }
+
+ /**
+ * Removes one element from the set of {@link #getCoveredTaxa() covered
+ * taxa} corresponding to <i>this</i> identification key.
+ *
+ * @param taxon
+ * the taxon which should be removed
+ * @see #getCoveredTaxa()
+ * @see #addCoveredTaxon(Taxon)
+ */
+ @Override
+ public void removeCoveredTaxon(Taxon taxon) {
+ this.coveredTaxa.remove(taxon);
+ }
+
+ /**
+ * Returns the set of {@link NamedArea named areas} indicating the
+ * geospatial data where <i>this</i> identification key is valid.
+ */
+ @Override
+ public Set<NamedArea> getGeographicalScope() {
+ if (geographicalScope == null) {
+ this.geographicalScope = new HashSet<NamedArea>();
+ }
+ return geographicalScope;
+ }
+
+ /**
+ * Adds a {@link NamedArea geoScope} to the set of {@link #getGeographicalScope()
+ * geospatial scopes} corresponding to <i>this</i> identification key.
+ *
+ * @param geoScope
+ * the named area to be added to <i>this</i> identification key
+ * @see #getGeographicalScope()
+ */
+ @Override
+ public void addGeographicalScope(NamedArea geoScope) {
+ this.geographicalScope.add(geoScope);
+ }
+
+ /**
+ * Removes one element from the set of {@link #getGeographicalScope() geospatial
+ * scopes} corresponding to <i>this</i> identification key.
+ *
+ * @param geoScope
+ * the named area which should be removed
+ * @see #getGeographicalScope()
+ * @see #addGeographicalScope(NamedArea)
+ */
+ @Override
+ public void removeGeographicalScope(NamedArea geoScope) {
+ this.geographicalScope.remove(geoScope);
+ }
+
+ /**
+ * Returns the set of {@link Taxon taxa} that define the taxonomic scope of
+ * <i>this</i> identification key
+ */
+ @Override
+ public Set<Taxon> getTaxonomicScope() {
+ if (taxonomicScope == null) {
+ this.taxonomicScope = new HashSet<Taxon>();
+ }
+ return taxonomicScope;
+ }
+
+ /**
+ * Adds a {@link Taxon taxa} to the set of {@link #getTaxonomicScope()
+ * taxonomic scopes} corresponding to <i>this</i> identification key.
+ *
+ * @param taxon
+ * the taxon to be added to <i>this</i> identification key
+ * @see #getTaxonomicScope()
+ */
+ @Override
+ public void addTaxonomicScope(Taxon taxon) {
+ this.taxonomicScope.add(taxon);
+ }
+
+ /**
+ * Removes one element from the set of {@link #getTaxonomicScope() taxonomic
+ * scopes} corresponding to <i>this</i> identification key.
+ *
+ * @param taxon
+ * the taxon which should be removed
+ * @see #getTaxonomicScope()
+ * @see #addTaxonomicScope(Taxon)
+ */
+ @Override
+ public void removeTaxonomicScope(Taxon taxon) {
+ this.taxonomicScope.remove(taxon);
+ }
+
+ /**
+ * Returns the set of {@link Scope scope restrictions} corresponding to
+ * <i>this</i> identification key
+ */
+ @Override
+ public Set<DefinedTerm> getScopeRestrictions() {
+ if (scopeRestrictions == null) {
+ this.scopeRestrictions = new HashSet<DefinedTerm>();
+ }
+ return scopeRestrictions;
+ }
+
+ /**
+ * Adds a {@link Scope scope restriction} to the set of
+ * {@link #getScopeRestrictions() scope restrictions} corresponding to
+ * <i>this</i> identification key.
+ *
+ * @param scopeRestriction
+ * the scope restriction to be added to <i>this</i>
+ * identification key
+ * @see #getScopeRestrictions()
+ */
+ @Override
+ public void addScopeRestriction(DefinedTerm scopeRestriction) {
+ this.scopeRestrictions.add(scopeRestriction);
+ }
+
+ /**
+ * Removes one element from the set of {@link #getScopeRestrictions() scope
+ * restrictions} corresponding to <i>this</i> identification key.
+ *
+ * @param scopeRestriction
+ * the scope restriction which should be removed
+ * @see #getScopeRestrictions()
+ * @see #addScopeRestriction(DefinedTerm)
+ */
+ @Override
+ public void removeScopeRestriction(DefinedTerm scopeRestriction) {
+ this.scopeRestrictions.remove(scopeRestriction);
+ }
+
+
+ /**
+ * The first number for the automated numbering of {@link PolytomousKeyNode key nodes}.
+ * Default value is 1.
+ * @return
+ */
+ public int getStartNumber() {
+ return startNumber;
+ }
+
+ /**
+ * @see #getStartNumber()
+ * @param startNumber
+ */
+ public void setStartNumber(int startNumber) {
+ this.startNumber = startNumber;
+ }
+
+ // ******************** toString *****************************************/
+
+ private class IntegerObject {
+ int number = 0;
+
+ int inc() {
+ return number++;
+ };
+
+ @Override
+ public String toString() {
+ return String.valueOf(number);
+ }
+ }
+
+ public String print(PrintStream stream) {
+ String title = this.getTitleCache() + "\n";
+ String strPrint = title;
+
+ if (stream != null) {
+ stream.print(title);
+ }
+
+ PolytomousKeyNode root = this.getRoot();
+ strPrint += printNode(root, null, " ", stream);
+ return strPrint;
+ }
+
+ /**
+ * Renders <code>node</code> and, recursively, all its non-leaf children as text,
+ * optionally echoing each finished node line to the given stream.
+ * TODO this is a preliminary implementation
+ *
+ * @param node the node to print; if <code>null</code> only the indentation prefix is returned
+ * @param parent2 the parent of <code>node</code> (currently unused)
+ * @param identation the indentation prefix for this nesting level
+ * @param stream optional stream to echo output to; may be <code>null</code>
+ * @return the textual representation of the subtree rooted at <code>node</code>
+ */
+ private String printNode(PolytomousKeyNode node, PolytomousKeyNode parent2,
+ String identation, PrintStream stream) {
+ String separator = ", ";
+
+ // NPE fix: do not dereference node before the null check below
+ String result = identation;
+ if (node != null) {
+ result += node.getNodeNumber() + ". ";
+ // key choice: question and/or feature, joined by " - "
+ String question = null;
+ String feature = null;
+ if (node.getQuestion() != null) {
+ question = node.getQuestion().getLabelText(Language.DEFAULT());
+ }
+ if (node.getFeature() != null) {
+ feature = node.getFeature().getLabel(Language.DEFAULT());
+ }
+ result += CdmUtils.concat(" - ", question, feature) + "\n";
+
+ // Leads: one line per child with a statement, lettered a), b), ...
+ char nextCounter = 'a';
+ for (PolytomousKeyNode child : node.getChildren()) {
+ String leadNumber = String.valueOf(nextCounter++);
+ if (child.getStatement() != null) {
+ String statement = child.getStatement().getLabelText(
+ Language.DEFAULT());
+ result += identation + " " + leadNumber + ") "
+ + (statement == null ? "" : (statement));
+ result += " ... ";
+ // child node number (only inner nodes are numbered)
+ if (!child.isLeaf()) {
+ result += child.getNodeNumber() + separator;
+ }
+ // taxon: prefer the name's title cache over the taxon's
+ if (child.getTaxon() != null) {
+ String strTaxon = "";
+ if (child.getTaxon().getName() != null) {
+ strTaxon = child.getTaxon().getName()
+ .getTitleCache();
+ } else {
+ strTaxon = child.getTaxon().getTitleCache();
+ }
+ result += strTaxon + separator;
+ }
+ // subkey
+ if (child.getSubkey() != null) {
+ String subkey = child.getSubkey().getTitleCache();
+ result += subkey + separator;
+ }
+ // other node: qualify with its key when it belongs to a different key
+ if (child.getOtherNode() != null) {
+ PolytomousKeyNode otherNode = child.getOtherNode();
+ String otherNodeString = null;
+ if (child.getKey().equals(otherNode.getKey())) {
+ otherNodeString = String.valueOf(otherNode
+ .getNodeNumber());
+ } else {
+ otherNodeString = otherNode.getKey() + " "
+ + otherNode.getNodeNumber();
+ }
+ result += otherNodeString + separator;
+ }
+
+ // strip the trailing separator of the last entry
+ result = StringUtils.chomp(result, separator);
+ result += "\n";
+ }
+ }
+
+ if (stream != null) {
+ stream.print(result);
+ }
+ // recurse into inner child nodes
+ for (PolytomousKeyNode child : node.getChildren()) {
+ if (!child.isLeaf()) {
+ result += printNode(child, node, identation + "", stream);
+ }
+ }
+ }
+ return result;
+ }
+
+ //
+ // public List<PolytomousKeyNode> getChildren() {
+ // return getRoot().getChildren();
+ // }
+
+ // *********************** CLONE ************************************/
+
+ /**
+ * Clones <i>this</i> PolytomousKey. This is a shortcut that enables to
+ * create a new instance that differs only slightly from <i>this</i>
+ * PolytomousKey by modifying only some of the attributes.
+ *
+ * @see eu.etaxonomy.cdm.model.common.IdentifiableEntity#clone()
+ * @see java.lang.Object#clone()
+ */
+ @Override
+ public Object clone() {
+ PolytomousKey result;
+
+ try {
+ result = (PolytomousKey) super.clone();
+
+ result.coveredTaxa = new HashSet<Taxon>();
+ for (Taxon taxon : this.coveredTaxa) {
+ result.addCoveredTaxon(taxon);
+ }
+
+ result.geographicalScope = new HashSet<NamedArea>();
+ for (NamedArea area : this.geographicalScope) {
+ result.addGeographicalScope(area);
+ }
+
+ result.root = (PolytomousKeyNode) this.root.clone();
+
+ result.scopeRestrictions = new HashSet<DefinedTerm>();
+ for (DefinedTerm scope : this.scopeRestrictions) {
+ result.addScopeRestriction(scope);
+ }
+
+ result.taxonomicScope = new HashSet<Taxon>();
+ for (Taxon taxon : this.taxonomicScope) {
+ result.addTaxonomicScope(taxon);
+ }
+
+ return result;
+
+ } catch (CloneNotSupportedException e) {
+ logger.warn("Object does not implement cloneable");
+ e.printStackTrace();
+ return null;
+ }
+
+ }
+
+}
import javax.persistence.Entity;
import javax.persistence.FetchType;
+import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.persistence.ManyToOne;
@XmlIDREF
@XmlSchemaType(name="IDREF")
@ManyToMany(fetch = FetchType.LAZY)
- @JoinTable(name="DescriptionBase_Scope")
+ //preliminary #5369
+ @JoinTable(
+ name="DescriptionBase_Scope",
+ joinColumns = @JoinColumn( name="DescriptionBase_id")
+ )
private Set<DefinedTerm> scopes = new HashSet<DefinedTerm>();
@XmlElementWrapper( name = "GeoScopes")
@XmlIDREF
@XmlSchemaType(name="IDREF")
@ManyToMany(fetch = FetchType.LAZY)
- @JoinTable(name="DescriptionBase_GeoScope")
+ //preliminary #5369
+ @JoinTable(
+ name="DescriptionBase_GeoScope",
+ joinColumns = @JoinColumn( name="DescriptionBase_id")
+ )
@Cascade({CascadeType.SAVE_UPDATE,CascadeType.MERGE})
private Set<NamedArea> geoScopes = new HashSet<NamedArea>();
import javax.persistence.Entity;
import javax.persistence.FetchType;
+import javax.persistence.JoinColumn;
+import javax.persistence.JoinTable;
import javax.persistence.ManyToOne;
import javax.persistence.MapKeyJoinColumn;
import javax.persistence.OneToMany;
* @version 1.0
* @created 08-Nov-2007 13:06:59
*/
+@SuppressWarnings("unused")
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "TextData", propOrder = {
"multilanguageText",
@XmlJavaTypeAdapter(MultilanguageTextAdapter.class)
@OneToMany (fetch= FetchType.LAZY, orphanRemoval=true)
@MapKeyJoinColumn(name="multilanguagetext_mapkey_id")
+ //preliminary #5369
+ @JoinTable(joinColumns = @JoinColumn( name="DescriptionElementBase_id"))
@Cascade({CascadeType.SAVE_UPDATE,CascadeType.MERGE, CascadeType.DELETE})
@Field(name="text", store=Store.YES)
@FieldBridge(impl=MultilanguageTextFieldBridge.class)
/**
* Copyright (C) 2007 EDIT
-* European Distributed Institute of Taxonomy
+* European Distributed Institute of Taxonomy
* http://www.e-taxonomy.eu
-*
+*
* The contents of this file are subject to the Mozilla Public License Version 1.1
* See LICENSE.TXT at the top of this package for the full license terms.
*/
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
+import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.xml.bind.annotation.XmlAccessType;
@XmlAttribute(name = "iso3166_A2")
@Column(length=2)
private String iso3166_A2;
-
+
@XmlElementWrapper(name = "Continents")
@XmlElement(name = "Continent")
@XmlIDREF
@XmlSchemaType(name = "IDREF")
@ManyToMany(fetch = FetchType.LAZY)
- @JoinTable(name="DefinedTermBase_Continent")
- private Set<NamedArea> continents = new HashSet<NamedArea>();
-
+ //preliminary #5369
+ @JoinTable(
+ name="DefinedTermBase_Continent",
+ joinColumns = @JoinColumn( name="DefinedTermBase_id")
+ )
+ private final Set<NamedArea> continents = new HashSet<NamedArea>();
+
protected static Map<UUID, NamedArea> termMap = null;
protected static Map<String, UUID> labelMap = null;
protected static Map<String, UUID> isoA2Map = null;
-
+
public static final UUID uuidCountryVocabulary = UUID.fromString("006b1870-7347-4624-990f-e5ed78484a1a");
-
+
private static final UUID uuidAfghanistan = UUID.fromString("974ce01a-5bce-4be8-b728-a46869354960");
private static final UUID uuidAlbaniaPeoplesSocialistRepublicof = UUID.fromString("238a6a93-8857-4fd6-af9e-6437c90817ac");
private static final UUID uuidAlgeriaPeoplesDemocraticRepublicof = UUID.fromString("a14b38ac-e963-4c1a-85c2-de1f17f8c72a");
public static final Country ZAMBIAREPUBLICOF () { return (Country)termMap.get(Country.uuidZambiaRepublicof );}
public static final Country ZIMBABWE () { return (Country)termMap.get(Country.uuidZimbabwe );}
-
-//****************** FACTORY METHODS ******************************/
-
+
+//****************** FACTORY METHODS ******************************/
+
/**
* Factory method
* @return
public static Country NewInstance(){
return new Country();
}
-
-
+
+
/**
* Factory method
* @return
public static Country NewInstance(String term, String label, String labelAbbrev){
return new Country(term, label, labelAbbrev);
}
-
-//********************************** Constructor *********************************/
+
+//********************************** Constructor *********************************/
//for hibernate use only
@Deprecated
}
//***************************** METHODS *****************************************/
-
+
public Set<NamedArea> getContinents() {
return continents;
}
public void addContinent(NamedArea continent) {
this.continents.add(continent);
}
-
+
public void removeContinent(NamedArea continent) {
this.continents.remove(continent);
}
}
/**
- * Set 2 character ISO 3166 Country code
+ * Set 2 character ISO 3166 Country code
* @param iso3166_A2 a String representation of the ISO 3166 code
*/
public void setIso3166_A2(String iso3166_A2){
String text = csvLine.get(3).trim();
String abbreviatedLabel = csvLine.get(2);
newInstance.addRepresentation(Representation.NewInstance(text, label, abbreviatedLabel, lang) );
-
+
// iso codes extra
newInstance.setIso3166_A2(csvLine.get(4).trim());
newInstance.setIdInVocabulary(abbreviatedLabel);
-
-
+
+
String[] continentList;
String tmp = csvLine.get(5).trim();
if (tmp.length()>2){
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
- }
+ }
return null;
}
-
+
public void writeCsvLine(CSVWriter writer) {
String [] line = new String[6];
line[0] = getUuid().toString();
line[4] = this.getIso3166_A2().toString();
line[5] = this.getContinents().toString();
writer.writeNext(line);
- }
+ }
public static boolean isCountryLabel(String label) {
if (labelMap.containsKey(label)){
}
-
+
//************************** METHODS ********************************
-
-
+
+
private static void initMaps(){
labelMap = new HashMap<String, UUID>();
termMap = new HashMap<UUID, NamedArea>();
isoA2Map = new HashMap<String, UUID>();
}
-
+
/**
* FIXME This class should really be refactored into an interface and service implementation,
* relying on TermVocabularyDao / service (Ben)
logger.info("Unknown country: " + CdmUtils.Nz(label));
return null;
}
- return (Country)termMap.get(uuid);
+ return (Country)termMap.get(uuid);
}
public static Country getCountryByIso3166A2(String isoA2) {
logger.info("Unknown country: " + CdmUtils.Nz(isoA2));
return null;
}
- return (Country)termMap.get(uuid);
+ return (Country)termMap.get(uuid);
}
-
+
/* (non-Javadoc)
* @see eu.etaxonomy.cdm.model.common.DefinedTermBase#resetTerms()
*/
}
-
+
@Override
protected void setDefaultTerms(TermVocabulary<NamedArea> termVocabulary) {
initMaps();
for (NamedArea term : termVocabulary.getTerms()){
- termMap.put(term.getUuid(), (NamedArea)term); //TODO casting
+ termMap.put(term.getUuid(), term); //TODO casting
}
for (NamedArea term : termVocabulary.getTerms()){
- labelMap.put(term.getLabel(), term.getUuid());
+ labelMap.put(term.getLabel(), term.getUuid());
}
for (NamedArea term : termVocabulary.getTerms()){
Country country = CdmBase.deproxy(term, Country.class);
- isoA2Map.put(country.getIso3166_A2(), term.getUuid());
+ isoA2Map.put(country.getIso3166_A2(), term.getUuid());
}
-
+
}
-
+
}
\ No newline at end of file
import javax.persistence.Entity;
import javax.persistence.FetchType;
+import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.persistence.ManyToOne;
private Point pointApproximation;
@ManyToMany(fetch = FetchType.LAZY)
- @JoinTable(name="DefinedTermBase_Country")
+ //preliminary #5369
+ @JoinTable(
+ name="DefinedTermBase_Country",
+ joinColumns = @JoinColumn( name="DefinedTermBase_id")
+ )
private final Set<Country> countries = new HashSet<Country>();
@ManyToOne(fetch = FetchType.LAZY)
/**
* Copyright (C) 2007 EDIT
-* European Distributed Institute of Taxonomy
+* European Distributed Institute of Taxonomy
* http://www.e-taxonomy.eu
-*
+*
* The contents of this file are subject to the Mozilla Public License Version 1.1
* See LICENSE.TXT at the top of this package for the full license terms.
*/
import javax.persistence.Entity;
import javax.persistence.FetchType;
+import javax.persistence.JoinColumn;
+import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.validation.constraints.NotNull;
import javax.xml.bind.annotation.XmlAccessType;
import eu.etaxonomy.cdm.model.media.Media;
/**
- * "A phylogenetic tree or evolutionary tree is a branching diagram or "tree" showing the
- * inferred evolutionary relationships among various biological species or other entities
- * based upon similarities and differences in their physical and/or genetic characteristics.
+ * "A phylogenetic tree or evolutionary tree is a branching diagram or "tree" showing the
+ * inferred evolutionary relationships among various biological species or other entities
+ * based upon similarities and differences in their physical and/or genetic characteristics.
* The taxa joined together in the tree are implied to have descended from a common ancestor."
* (Wikipedia).
* <BR> In the CDM we currently store phylogenetic trees only as media. This may change in future.
- *
+ *
* @author m.doering
* @created 08-Nov-2007
*/
public class PhylogeneticTree extends Media implements Cloneable{
private static final long serialVersionUID = -7020182117362324067L;
private static final Logger logger = Logger.getLogger(PhylogeneticTree.class);
-
-
+
+
@XmlElementWrapper(name = "UsedSequences")
@XmlElement(name = "UsedSequence")
@XmlIDREF
@XmlSchemaType(name = "IDREF")
@ManyToMany(fetch = FetchType.LAZY)
+ //preliminary #5369
+ @JoinTable(
+ joinColumns = @JoinColumn( name="Media_id")
+ )
@NotNull
private Set<Sequence> usedSequences = new HashSet<Sequence>();
-
+
//********************** Factory Method **********************************/
-
+
/**
* Factory method
* @return
return new PhylogeneticTree();
}
-
+
//***************** Constructor ****************************/
-
+
private PhylogeneticTree(){
super();
}
-
-
+
+
// ********************** GETTER / SETTER **************************/
-
+
public Set<Sequence> getUsedSequences() {
if(usedSequences == null) {
this.usedSequences = new HashSet<Sequence>();
public void addUsedSequences(Sequence usedSequence) {
this.usedSequences.add(usedSequence);
}
-
+
public void removeUsedSequences(Sequence usedSequence) {
this.usedSequences.remove(usedSequence);
-
+
}
-
-//*********** CLONE **********************************/
-
- /**
+
+//*********** CLONE **********************************/
+
+ /**
* Clones <i>this</i> phylogenetic tree. This is a shortcut that enables to
* create a new instance that differs only slightly from <i>this</i> phylogenetic tree
* by modifying only some of the attributes.<BR>
* This method overrides the clone method from {@link Media Media}.
- *
+ *
* @see eu.etaxonomy.cdm.model.media.Media#clone()
* @see java.lang.Object#clone()
*/
@Override
-
+
public Object clone(){
PhylogeneticTree result;
try{
for (Sequence seq: this.usedSequences){
result.addUsedSequences((Sequence)seq.clone());
}
-
+
return result;
}catch (CloneNotSupportedException e) {
logger.warn("Object does not implement cloneable");
*
* The contents of this file are subject to the Mozilla Public License Version 1.1
* See LICENSE.TXT at the top of this package for the full license terms.
-*/
+*/
package eu.etaxonomy.cdm.model.view;
@Entity
@RevisionEntity
public class AuditEvent implements Serializable {
-/**
- *
- */
+
private static final long serialVersionUID = 6584537382484488953L;
public static final AuditEvent CURRENT_VIEW;
-
+
static {
CURRENT_VIEW = new AuditEvent();
CURRENT_VIEW.setUuid(UUID.fromString("966728f0-ae51-11dd-ad8b-0800200c9a66"));
- };
-
+ }
+
@Type(type="uuidUserType")
private UUID uuid;
-
+
+ @Id
+ @GeneratedValue(generator = "custom-enhanced-table") //see also CdmBase.id
+ @RevisionNumber
+ private Integer revisionNumber;
+
+ @Type(type="dateTimeUserType")
+ @Basic(fetch = FetchType.LAZY)
+ private DateTime date;
+
+ @RevisionTimestamp
+ private Long timestamp;
+
+//**************** CONSTRUCTOR ********************************/
+
+ public AuditEvent() {
+ this.uuid = UUID.randomUUID();
+ this.date = new DateTime();
+ }
+
+//***************** GETTER/ SETTER *************************/
+
public UUID getUuid() {
return uuid;
}
+ public void setUuid(UUID uuid) {
+ this.uuid = uuid;
+ }
public Long getTimestamp() {
return timestamp;
}
- public AuditEvent() {
- this.uuid = UUID.randomUUID();
- this.date = new DateTime();
- }
-
- @Type(type="dateTimeUserType")
- @Basic(fetch = FetchType.LAZY)
- private DateTime date;
-
+
public DateTime getDate() {
return date;
}
-
public void setDate(DateTime date) {
this.date = date;
}
- @Id
- @GeneratedValue
- @RevisionNumber
- private Integer revisionNumber;
-
- @RevisionTimestamp
- private Long timestamp;
-
public Integer getRevisionNumber() {
return revisionNumber;
}
-
- public void setUuid(UUID uuid) {
- this.uuid = uuid;
- }
-
+ public void setRevisionNumber(Integer revisionNumber) {
+ this.revisionNumber = revisionNumber;
+ }
+
+// ****************** Overrides **************************/
+
+ @Override
public boolean equals(Object obj) {
- if(this == obj)
- return true;
-
- if((obj == null) || (obj.getClass() != this.getClass()))
- return false;
-
+ if(this == obj) {
+ return true;
+ }
+
+ if((obj == null) || (obj.getClass() != this.getClass())) {
+ return false;
+ }
+
AuditEvent auditEvent = (AuditEvent) obj;
- return uuid == auditEvent.uuid || (uuid != null && uuid.equals(auditEvent.uuid));
+ return uuid == auditEvent.uuid || (uuid != null && uuid.equals(auditEvent.uuid));
}
-
- public int hashCode() {
+
+ @Override
+ public int hashCode() {
int hash = 7;
hash = 31 * hash + (null == uuid ? 0 : uuid.hashCode());
return hash;
}
- public void setRevisionNumber(Integer revisionNumber) {
- this.revisionNumber = revisionNumber;
- }
}
*
* The contents of this file are subject to the Mozilla Public License Version 1.1
* See LICENSE.TXT at the top of this package for the full license terms.
-*/
+*/
package eu.etaxonomy.cdm.model.view.context;
import eu.etaxonomy.cdm.model.view.AuditEvent;
/**
- * Class based heavily on SecurityContextImpl, part
- * of spring-security, but instead binding a View object to the
+ * Class based heavily on SecurityContextImpl, part
+ * of spring-security, but instead binding a View object to the
* context.
- *
+ *
* @author ben
* @author Ben Alex
*
*/
public class AuditEventContextImpl implements AuditEventContext {
-
- private AuditEvent auditEvent;
+ private static final long serialVersionUID = 4477662916416534368L;
+
+ private AuditEvent auditEvent;
+ @Override
public AuditEvent getAuditEvent() {
return auditEvent;
}
- public void setAuditEvent(AuditEvent auditEvent) {
+ @Override
+ public void setAuditEvent(AuditEvent auditEvent) {
this.auditEvent = auditEvent;
}
-
- public boolean equals(Object obj) {
+
+ @Override
+ public boolean equals(Object obj) {
if (obj instanceof AuditEventContextImpl) {
AuditEventContextImpl test = (AuditEventContextImpl) obj;
-
+
if ((this.getAuditEvent() == null) && (test.getAuditEvent() == null)) {
return true;
}
-
+
if ((this.getAuditEvent() != null) && (test.getAuditEvent() != null)
&& this.getAuditEvent().equals(test.getAuditEvent())) {
- return true;
+ return true;
}
}
-
+
return false;
}
-
+
+ @Override
public int hashCode() {
if (this.auditEvent == null) {
return -1;
} else {
- return this.auditEvent.hashCode();
+ return this.auditEvent.hashCode();
}
}
-
+
+ @Override
public String toString() {
StringBuffer sb = new StringBuffer();
sb.append(super.toString());
-
+
if (this.auditEvent == null) {
sb.append(": Null auditEvent");
} else {
sb.append(": AuditEvent: ").append(this.auditEvent);
}
-
+
return sb.toString();
}
}
### *** SPRING ************ ###
log4j.logger.org.springframework.transaction = warn
-log4j.logger.org.springframework.orm.hibernate4.SessionFactoryUtils = info
-log4j.logger.org.springframework.orm.hibernate4 = info
+log4j.logger.org.springframework.orm.hibernate5.SessionFactoryUtils = info
+log4j.logger.org.springframework.orm.hibernate5 = info
log4j.logger.org.springframework.FileSystemXmlApplicationContext = warn
log4j.logger.org.springframework.core.io.support = info
static Logger logger = Logger.getLogger(NewEntityListenerTest.class);
private Object lastPropValue;
- /* (non-Javadoc)
- * @see eu.etaxonomy.cdm.model.NewEntityListener#onCreate(eu.etaxonomy.cdm.model.common.CdmBase)
- */
@Override
public void onCreate(CdmBase cdmBase) {
- logger.warn("New Entity " + cdmBase + " created");
+ logger.info("New Entity " + cdmBase + " created");
lastPropValue = cdmBase;
}
@Test
public void testPropertyChange() {
- NonViralName.setNewEntityListener(this);
- NonViralName b = NonViralName.NewInstance(Rank.SPECIES());
+ CdmBase.setNewEntityListener(this);
+
+ NonViralName<?> b = NonViralName.NewInstance(Rank.SPECIES());
Annotation newAnnotation = Annotation.NewDefaultLanguageInstance("test");
b.addAnnotation(newAnnotation);
Assert.assertEquals(newAnnotation, lastPropValue);
### *** SPRING ************ ###
log4j.logger.org.springframework.transaction = warn
-log4j.logger.org.springframework.orm.hibernate4.SessionFactoryUtils = info
-log4j.logger.org.springframework.orm.hibernate4 = info
+log4j.logger.org.springframework.orm.hibernate5.SessionFactoryUtils = info
+log4j.logger.org.springframework.orm.hibernate5 = info
log4j.logger.org.springframework.FileSystemXmlApplicationContext = warn
log4j.logger.org.springframework.core.io.support = info
<components>
<component>
<name>hbm2ddl</name>
- <implementation>annotationconfiguration</implementation>
+ <implementation>annotationconfigurationX</implementation>
</component>
</components>
<componentProperties>
<groupId>eu.etaxonomy</groupId>
<artifactId>cdmlib-model</artifactId>
</dependency>
- <dependency>
- <groupId>eu.etaxonomy</groupId>
- <artifactId>cdmlib-commons</artifactId>
- </dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<dependency>
<groupId>com.mchange</groupId>
<artifactId>c3p0</artifactId>
- <version>0.9.2</version>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-spellchecker</artifactId>
</dependency>
- <dependency>
+ <dependency>
<groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils</artifactId>
</dependency>
<artifactId>unitils-core</artifactId>
<!-- <scope>test</scope> unscoped since we need this dependency at compile time for H2DbSupport.java -->
</dependency>
+
+<!-- <dependency> -->
+<!-- <groupId>eu.etaxonomy</groupId> -->
+<!-- <artifactId>cdmlib-test</artifactId> -->
+<!-- <scope>test</scope> -->
+<!-- </dependency> -->
<dependency>
<groupId>org.unitils</groupId>
<artifactId>unitils-database</artifactId>
<groupId>org.springframework.security</groupId>
<artifactId>spring-security-core</artifactId>
</dependency>
- <!-- required by Xerces 2.11.0 -->
- <dependency>
- <groupId>org.apache.xmlgraphics</groupId>
- <artifactId>batik-ext</artifactId>
- </dependency>
+ <!-- required by Xerces 2.11.0 -->
+ <dependency>
+ <groupId>org.apache.xmlgraphics</groupId>
+ <artifactId>batik-ext</artifactId>
+ </dependency>
<!-- ******* DATABASES DRIVER ******* -->
<dependency>
*/
package eu.etaxonomy.cdm.config;
-import org.apache.lucene.util.Version;
/**
* @author a.kohlbecker
*/
public class Configuration {
- /**
- * This should be set via the hibernate properties but at the time being it
- * is quite complex to get <code>hibernate.search.lucene_version</code> from
- * the configuration and to pass it to all classes which require this
- * version As a preliminary and unobtrusive solution the luceneVersion is
- * now provided by this.
- * <p>
- * TODO A better solution for the future would be to provide all Lucene
- * related instances of <code>LuceneSearch</code>, etc via a special
- * factory. This factors would be a spring bean and thus could have access
- * to the hibernate configuration. see #3369 (Lucene search factory or builder implemented)
- *
- */
- @Deprecated // so we now it is not 100% save to use this
- public static Version luceneVersion = Version.LUCENE_4_10_4;
-
/**
* Login name for the first user 'admin'
*/
-/**\r
-* Copyright (C) 2009 EDIT\r
-* European Distributed Institute of Taxonomy\r
-* http://www.e-taxonomy.eu\r
-*\r
-* The contents of this file are subject to the Mozilla Public License Version 1.1\r
-* See LICENSE.TXT at the top of this package for the full license terms.\r
-*/ \r
-\r
-package eu.etaxonomy.cdm.database;\r
-\r
-import java.util.Map;\r
-\r
-import org.apache.log4j.Logger;\r
-import org.hibernate.SessionFactory;\r
-import org.springframework.beans.factory.annotation.Autowired;\r
-import org.springframework.jdbc.datasource.SimpleDriverDataSource;\r
-import org.springframework.orm.hibernate4.SessionFactoryUtils;\r
-import org.springframework.stereotype.Component;\r
-\r
-@Component\r
-public class DataSourceReloader {\r
- \r
- static final Logger logger = Logger.getLogger(DataSourceReloader.class);\r
-\r
- private SessionFactory factory;\r
- \r
- @Autowired\r
- public void setSessionFacory(SessionFactory factory){\r
- this.factory = factory;\r
- }\r
- \r
- public UpdatableRoutingDataSource getDataSource() {\r
- UpdatableRoutingDataSource as = (UpdatableRoutingDataSource)SessionFactoryUtils.getDataSource(factory);\r
- return as;\r
- }\r
- \r
- public Map<String,DataSourceInfo> reload() {\r
- return getDataSource().updateDataSources();\r
- }\r
- \r
- public Map<String,DataSourceInfo> test() {\r
- Map<String,SimpleDriverDataSource> dataSources = getDataSource().loadDataSources();\r
- Map<String, DataSourceInfo> dataSourceInfos = getDataSource().testDataSources(dataSources);\r
- return dataSourceInfos;\r
- }\r
- \r
-}\r
+/**
+* Copyright (C) 2009 EDIT
+* European Distributed Institute of Taxonomy
+* http://www.e-taxonomy.eu
+*
+* The contents of this file are subject to the Mozilla Public License Version 1.1
+* See LICENSE.TXT at the top of this package for the full license terms.
+*/
+
+package eu.etaxonomy.cdm.database;
+
+import java.util.Map;
+
+import org.apache.log4j.Logger;
+import org.hibernate.SessionFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.jdbc.datasource.SimpleDriverDataSource;
+import org.springframework.orm.hibernate5.SessionFactoryUtils;
+import org.springframework.stereotype.Component;
+
+@Component
+public class DataSourceReloader {
+
+ static final Logger logger = Logger.getLogger(DataSourceReloader.class);
+
+ private SessionFactory factory;
+
+ @Autowired
+ public void setSessionFacory(SessionFactory factory){
+ this.factory = factory;
+ }
+
+ public UpdatableRoutingDataSource getDataSource() {
+ UpdatableRoutingDataSource as = (UpdatableRoutingDataSource)SessionFactoryUtils.getDataSource(factory);
+ return as;
+ }
+
+ public Map<String,DataSourceInfo> reload() {
+ return getDataSource().updateDataSources();
+ }
+
+ public Map<String,DataSourceInfo> test() {
+ Map<String,SimpleDriverDataSource> dataSources = getDataSource().loadDataSources();
+ Map<String, DataSourceInfo> dataSourceInfos = getDataSource().testDataSources(dataSources);
+ return dataSourceInfos;
+ }
+
+}
-\r
-package eu.etaxonomy.cdm.database;\r
-\r
-import java.util.EnumSet;\r
-\r
-import org.apache.log4j.Logger;\r
-import org.hibernate.HibernateException;\r
-import org.springframework.security.core.Authentication;\r
-\r
-import eu.etaxonomy.cdm.model.common.CdmBase;\r
-import eu.etaxonomy.cdm.persistence.hibernate.permission.CRUD;\r
-import eu.etaxonomy.cdm.persistence.hibernate.permission.Operation;\r
-import eu.etaxonomy.cdm.persistence.hibernate.permission.Role;\r
-\r
-\r
-/**\r
- *\r
- * @author andreas\r
- * @date Sep 4, 2012\r
- *\r
- */\r
-public class PermissionDeniedException extends HibernateException {\r
-\r
- private static final long serialVersionUID = 1L;\r
-\r
- private static final Logger logger = Logger\r
- .getLogger(PermissionDeniedException.class);\r
-\r
- /**\r
- * @param message\r
- */\r
- public PermissionDeniedException(String message) {\r
- super(message);\r
- }\r
-\r
- public PermissionDeniedException(Authentication authentication, CdmBase entity, Operation requiredOperation) {\r
- super(requiredOperation + " not permitted for '" + authentication.getName()\r
- + "' on " + entity.getClass().getSimpleName() + "[uuid:" + entity.getUuid() + "', toString:'" + entity.toString() + "']");\r
- }\r
-\r
- public PermissionDeniedException(Authentication authentication, CdmBase entity, EnumSet<CRUD> requiredOperation) {\r
- super(requiredOperation + " not permitted for '" + authentication.getName()\r
- + "' on " + entity.getClass().getSimpleName() + "[uuid:" + entity.getUuid() + "', toString:'" + entity.toString() + "']");\r
- }\r
-\r
- public PermissionDeniedException(Authentication authentication, CdmBase entity, String requiredOperation) {\r
- super(requiredOperation + " not permitted for '" + authentication.getName()\r
- + "' on " + entity.getClass().getSimpleName() + "[uuid:" + entity.getUuid() + "', toString:'" + entity.toString() + "']");\r
- }\r
-\r
- /**\r
- * @param authentication\r
- * @param roles\r
- */\r
- public PermissionDeniedException(Authentication authentication, Role[] roles) {\r
-\r
- super("Permission denied for '" + authentication.getName()\r
- + "' none of the roles '" + roles + "' found in authentication.");\r
- }\r
-\r
- /**\r
- * @param cause\r
- */\r
- public PermissionDeniedException(Throwable cause) {\r
- super(cause);\r
- }\r
-\r
- /**\r
- * @param message\r
- * @param cause\r
- */\r
- public PermissionDeniedException(String message, Throwable cause) {\r
- super(message, cause);\r
- }\r
-\r
-}\r
+
+package eu.etaxonomy.cdm.database;
+
+import java.util.EnumSet;
+
+import org.apache.log4j.Logger;
+import org.hibernate.HibernateException;
+import org.springframework.security.core.Authentication;
+
+import eu.etaxonomy.cdm.model.common.CdmBase;
+import eu.etaxonomy.cdm.persistence.hibernate.permission.CRUD;
+import eu.etaxonomy.cdm.persistence.hibernate.permission.Operation;
+import eu.etaxonomy.cdm.persistence.hibernate.permission.Role;
+
+/**
+ * @author andreas
+ * @date Sep 4, 2012
+ */
+public class PermissionDeniedException extends HibernateException {
+ private static final long serialVersionUID = 6993452039967589921L;
+ @SuppressWarnings("unused")
+ private static final Logger logger = Logger.getLogger(PermissionDeniedException.class);
+
+ /**
+ * @param message
+ */
+ public PermissionDeniedException(String message) {
+ super(message);
+ }
+
+ public PermissionDeniedException(Authentication authentication, CdmBase entity, Operation requiredOperation) {
+ super(requiredOperation + " not permitted for '" + authentication.getName()
+ + "' on " + entity.getClass().getSimpleName() + "[uuid:" + entity.getUuid() + "', toString:'" + entity.toString() + "']");
+ }
+
+ public PermissionDeniedException(Authentication authentication, CdmBase entity, EnumSet<CRUD> requiredOperation) {
+ super(requiredOperation + " not permitted for '" + authentication.getName()
+ + "' on " + entity.getClass().getSimpleName() + "[uuid:" + entity.getUuid() + "', toString:'" + entity.toString() + "']");
+ }
+
+ public PermissionDeniedException(Authentication authentication, CdmBase entity, String requiredOperation) {
+ super(requiredOperation + " not permitted for '" + authentication.getName()
+ + "' on " + entity.getClass().getSimpleName() + "[uuid:" + entity.getUuid() + "', toString:'" + entity.toString() + "']");
+ }
+
+ /**
+ * @param authentication
+ * @param roles
+ */
+ public PermissionDeniedException(Authentication authentication, Role[] roles) {
+
+ super("Permission denied for '" + authentication.getName()
+ + "' none of the roles '" + roles + "' found in authentication.");
+ }
+
+ /**
+ * @param cause
+ */
+ public PermissionDeniedException(Throwable cause) {
+ super(cause);
+ }
+
+ /**
+ * @param message
+ * @param cause
+ */
+ public PermissionDeniedException(String message, Throwable cause) {
+ super(message, cause);
+ }
+
+}
txDefinition.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRED);
}
+ @Override
public void setOmit(boolean omit) {
this.omit = omit;
}
+ @Override
public boolean isOmit() {
return omit;
}
* loaded terms with their <code>UUID</code> as key
* @return the <code>UUID</code> of the loaded vocabulary as found in CSV file
*/
- public UUID firstPass(VocabularyEnum vocabularyType, Map<UUID, DefinedTermBase> persistedTerms) {
+ private UUID firstPass(VocabularyEnum vocabularyType, Map<UUID, DefinedTermBase> persistedTerms) {
logger.info("Loading terms for '" + vocabularyType.name() + "': " + vocabularyType.getClazz().getName());
Map<UUID,DefinedTermBase> terms = new HashMap<UUID,DefinedTermBase>();
-/**\r
-* Copyright (C) 2007 EDIT\r
-* European Distributed Institute of Taxonomy\r
-* http://www.e-taxonomy.eu\r
-*\r
-* The contents of this file are subject to the Mozilla Public License Version 1.1\r
-* See LICENSE.TXT at the top of this package for the full license terms.\r
-*/\r
-\r
-package eu.etaxonomy.cdm.persistence.dao.hibernate.agent;\r
-\r
-import java.util.ArrayList;\r
-import java.util.List;\r
-import java.util.UUID;\r
-\r
-import org.apache.log4j.Logger;\r
-import org.hibernate.Criteria;\r
-import org.hibernate.Query;\r
-import org.hibernate.Session;\r
-import org.hibernate.criterion.Restrictions;\r
-import org.hibernate.envers.query.AuditEntity;\r
-import org.hibernate.envers.query.AuditQuery;\r
-import org.springframework.stereotype.Repository;\r
-\r
-import eu.etaxonomy.cdm.model.agent.Address;\r
-import eu.etaxonomy.cdm.model.agent.AgentBase;\r
-import eu.etaxonomy.cdm.model.agent.Institution;\r
-import eu.etaxonomy.cdm.model.agent.InstitutionalMembership;\r
-import eu.etaxonomy.cdm.model.agent.Person;\r
-import eu.etaxonomy.cdm.model.agent.Team;\r
-import eu.etaxonomy.cdm.model.view.AuditEvent;\r
-import eu.etaxonomy.cdm.persistence.dao.agent.IAgentDao;\r
-import eu.etaxonomy.cdm.persistence.dao.hibernate.common.IdentifiableDaoBase;\r
-import eu.etaxonomy.cdm.persistence.dto.UuidAndTitleCache;\r
-\r
-\r
-@Repository\r
-public class AgentDaoImpl extends IdentifiableDaoBase<AgentBase> implements IAgentDao{\r
-\r
- @SuppressWarnings("unused")\r
- private static final Logger logger = Logger.getLogger(AgentDaoImpl.class);\r
-\r
- public AgentDaoImpl() {\r
- super(AgentBase.class);\r
- indexedClasses = new Class[3];\r
- indexedClasses[0] = Institution.class;\r
- indexedClasses[1] = Person.class;\r
- indexedClasses[2] = Team.class;\r
- }\r
-\r
- @Override\r
- public List<Institution> getInstitutionByCode(String code) {\r
- AuditEvent auditEvent = getAuditEventFromContext();\r
- if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {\r
- Criteria crit = getSession().createCriteria(Institution.class);\r
- crit.add(Restrictions.eq("code", code));\r
- return crit.list();\r
- } else {\r
- AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(Institution.class,auditEvent.getRevisionNumber());\r
- query.add(AuditEntity.property("code").eq(code));\r
- return query.getResultList();\r
- }\r
- }\r
-\r
- @Override\r
- public int countInstitutionalMemberships(Person person) {\r
- AuditEvent auditEvent = getAuditEventFromContext();\r
- if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {\r
- Query query = getSession().createQuery("select count(institutionalMembership) from InstitutionalMembership institutionalMembership where institutionalMembership.person = :person");\r
- query.setParameter("person", person);\r
- return ((Long)query.uniqueResult()).intValue();\r
- } else {\r
- AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(InstitutionalMembership.class,auditEvent.getRevisionNumber());\r
- query.add(AuditEntity.relatedId("person").eq(person.getId()));\r
- query.addProjection(AuditEntity.id().count("id"));\r
- return ((Long)query.getSingleResult()).intValue();\r
- }\r
- }\r
-\r
- @Override\r
- public int countMembers(Team team) {\r
- checkNotInPriorView("AgentDaoImpl.countMembers(Team team)");\r
- Query query = getSession().createQuery("select count(teamMember) from Team team join team.teamMembers teamMember where team = :team");\r
- query.setParameter("team", team);\r
- return ((Long)query.uniqueResult()).intValue();\r
- }\r
-\r
- @Override\r
- public List<InstitutionalMembership> getInstitutionalMemberships(Person person, Integer pageSize, Integer pageNumber) {\r
- AuditEvent auditEvent = getAuditEventFromContext();\r
- if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {\r
- Query query = getSession().createQuery("select institutionalMembership from InstitutionalMembership institutionalMembership left join fetch institutionalMembership.institute where institutionalMembership.person = :person");\r
- query.setParameter("person", person);\r
- setPagingParameter(query, pageSize, pageNumber);\r
- return query.list();\r
- } else {\r
- AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(InstitutionalMembership.class,auditEvent.getRevisionNumber());\r
- query.add(AuditEntity.relatedId("person").eq(person.getId()));\r
- setPagingParameter(query, pageSize, pageNumber);\r
- return query.getResultList();\r
- }\r
- }\r
-\r
- @Override\r
- public List<Person> getMembers(Team team, Integer pageSize, Integer pageNumber) {\r
- checkNotInPriorView("AgentDaoImpl.getMembers(Team team, Integer pageSize, Integer pageNumber)");\r
- Query query = getSession().createQuery("select teamMember from Team team join team.teamMembers teamMember where team = :team order by sortindex");\r
- query.setParameter("team", team);\r
- //query.addOrder( Order.asc("sortindex") );\r
- setPagingParameter(query, pageSize, pageNumber);\r
- return query.list();\r
- }\r
-\r
- @Override\r
- public Integer countAddresses(AgentBase agent) {\r
- checkNotInPriorView("AgentDaoImpl.countAddresses(AgentBase agent)");\r
- Query query = getSession().createQuery("select count(address) from AgentBase agent join agent.contact.addresses address where agent = :agent");\r
- query.setParameter("agent", agent);\r
- return ((Long)query.uniqueResult()).intValue();\r
- }\r
-\r
- @Override\r
- public List<Address> getAddresses(AgentBase agent, Integer pageSize,Integer pageNumber) {\r
- checkNotInPriorView("AgentDaoImpl.getAddresses(AgentBase agent, Integer pageSize,Integer pageNumber)");\r
- Query query = getSession().createQuery("select address from AgentBase agent join agent.contact.addresses address where agent = :agent");\r
- query.setParameter("agent", agent);\r
- setPagingParameter(query, pageSize, pageNumber);\r
- return query.list();\r
- }\r
-\r
-\r
- @Override\r
- public List<UuidAndTitleCache<Team>> getTeamUuidAndNomenclaturalTitle() {\r
- List<UuidAndTitleCache<Team>> list = new ArrayList<UuidAndTitleCache<Team>>();\r
- Session session = getSession();\r
-\r
- Query query = session.createQuery("select uuid, id, nomenclaturalTitle from " + type.getSimpleName() + " where dtype = 'Team'");\r
-\r
- @SuppressWarnings("unchecked")\r
- List<Object[]> result = query.list();\r
-\r
- for(Object[] object : result){\r
- list.add(new UuidAndTitleCache<Team>(Team.class, (UUID) object[0], (Integer)object[1], (String) object[2]));\r
- }\r
-\r
- return list;\r
- }\r
-\r
- @Override\r
- public List<UuidAndTitleCache<Person>> getPersonUuidAndTitleCache() {\r
- Query query = getSession().createQuery("select uuid, id, titleCache from " + type.getSimpleName() + " where dtype = 'Person'");\r
- return getUuidAndTitleCache(query);\r
- }\r
-\r
- @Override\r
- public List<UuidAndTitleCache<Team>> getTeamUuidAndTitleCache() {\r
- Query query = getSession().createQuery("select uuid, id, titleCache from " + type.getSimpleName() + " where dtype = 'Team'");\r
- return getUuidAndTitleCache(query);\r
- }\r
-\r
- @Override\r
- public List<UuidAndTitleCache<Institution>> getInstitutionUuidAndTitleCache() {\r
- Query query = getSession().createQuery("select uuid, id, titleCache from " + type.getSimpleName() + " where dtype = 'Institution'");\r
- return getUuidAndTitleCache(query);\r
- }\r
-\r
-}\r
+/**
+* Copyright (C) 2007 EDIT
+* European Distributed Institute of Taxonomy
+* http://www.e-taxonomy.eu
+*
+* The contents of this file are subject to the Mozilla Public License Version 1.1
+* See LICENSE.TXT at the top of this package for the full license terms.
+*/
+
+package eu.etaxonomy.cdm.persistence.dao.hibernate.agent;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.UUID;
+
+import org.apache.log4j.Logger;
+import org.hibernate.Criteria;
+import org.hibernate.Query;
+import org.hibernate.Session;
+import org.hibernate.criterion.Restrictions;
+import org.hibernate.envers.query.AuditEntity;
+import org.hibernate.envers.query.AuditQuery;
+import org.springframework.stereotype.Repository;
+
+import eu.etaxonomy.cdm.model.agent.Address;
+import eu.etaxonomy.cdm.model.agent.AgentBase;
+import eu.etaxonomy.cdm.model.agent.Institution;
+import eu.etaxonomy.cdm.model.agent.InstitutionalMembership;
+import eu.etaxonomy.cdm.model.agent.Person;
+import eu.etaxonomy.cdm.model.agent.Team;
+import eu.etaxonomy.cdm.model.view.AuditEvent;
+import eu.etaxonomy.cdm.persistence.dao.agent.IAgentDao;
+import eu.etaxonomy.cdm.persistence.dao.hibernate.common.IdentifiableDaoBase;
+import eu.etaxonomy.cdm.persistence.dto.UuidAndTitleCache;
+
+
+@Repository
+public class AgentDaoImpl extends IdentifiableDaoBase<AgentBase> implements IAgentDao{
+
+    @SuppressWarnings("unused")
+    private static final Logger logger = Logger.getLogger(AgentDaoImpl.class);
+
+    public AgentDaoImpl() {
+        super(AgentBase.class);
+        // concrete agent classes covered by the full-text index
+        indexedClasses = new Class[3];
+        indexedClasses[0] = Institution.class;
+        indexedClasses[1] = Person.class;
+        indexedClasses[2] = Team.class;
+    }
+
+    /** Returns all institutions matching the given code, honoring the audit view if one is active. */
+    @Override
+    public List<Institution> getInstitutionByCode(String code) {
+        AuditEvent auditEvent = getAuditEventFromContext();
+        if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {
+            Criteria crit = getSession().createCriteria(Institution.class);
+            crit.add(Restrictions.eq("code", code));
+            return crit.list();
+        } else {
+            AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(Institution.class,auditEvent.getRevisionNumber());
+            query.add(AuditEntity.property("code").eq(code));
+            return query.getResultList();
+        }
+    }
+
+    /** Counts the institutional memberships of the given person, honoring the audit view if one is active. */
+    @Override
+    public int countInstitutionalMemberships(Person person) {
+        AuditEvent auditEvent = getAuditEventFromContext();
+        if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {
+            Query query = getSession().createQuery("select count(institutionalMembership) from InstitutionalMembership institutionalMembership where institutionalMembership.person = :person");
+            query.setParameter("person", person);
+            return ((Long)query.uniqueResult()).intValue();
+        } else {
+            AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(InstitutionalMembership.class,auditEvent.getRevisionNumber());
+            query.add(AuditEntity.relatedId("person").eq(person.getId()));
+            // project onto count(id); a bare id projection would return an id,
+            // not a count, and the Long cast below would yield a wrong result
+            query.addProjection(AuditEntity.id().count());
+            return ((Long)query.getSingleResult()).intValue();
+        }
+    }
+
+    /** Counts the members of the given team; not supported in a prior (audit) view. */
+    @Override
+    public int countMembers(Team team) {
+        checkNotInPriorView("AgentDaoImpl.countMembers(Team team)");
+        Query query = getSession().createQuery("select count(teamMember) from Team team join team.teamMembers teamMember where team = :team");
+        query.setParameter("team", team);
+        return ((Long)query.uniqueResult()).intValue();
+    }
+
+    /** Pages through the institutional memberships of the given person, honoring the audit view if one is active. */
+    @Override
+    public List<InstitutionalMembership> getInstitutionalMemberships(Person person, Integer pageSize, Integer pageNumber) {
+        AuditEvent auditEvent = getAuditEventFromContext();
+        if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {
+            Query query = getSession().createQuery("select institutionalMembership from InstitutionalMembership institutionalMembership left join fetch institutionalMembership.institute where institutionalMembership.person = :person");
+            query.setParameter("person", person);
+            setPagingParameter(query, pageSize, pageNumber);
+            return query.list();
+        } else {
+            AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(InstitutionalMembership.class,auditEvent.getRevisionNumber());
+            query.add(AuditEntity.relatedId("person").eq(person.getId()));
+            setPagingParameter(query, pageSize, pageNumber);
+            return query.getResultList();
+        }
+    }
+
+    /** Pages through the members of the given team ordered by sortindex; not supported in a prior (audit) view. */
+    @Override
+    public List<Person> getMembers(Team team, Integer pageSize, Integer pageNumber) {
+        checkNotInPriorView("AgentDaoImpl.getMembers(Team team, Integer pageSize, Integer pageNumber)");
+        Query query = getSession().createQuery("select teamMember from Team team join team.teamMembers teamMember where team = :team order by sortindex");
+        query.setParameter("team", team);
+        //query.addOrder( Order.asc("sortindex") );
+        setPagingParameter(query, pageSize, pageNumber);
+        return query.list();
+    }
+
+    /** Counts the addresses of the given agent's contact; not supported in a prior (audit) view. */
+    @Override
+    public Integer countAddresses(AgentBase agent) {
+        checkNotInPriorView("AgentDaoImpl.countAddresses(AgentBase agent)");
+        Query query = getSession().createQuery("select count(address) from AgentBase agent join agent.contact.addresses address where agent = :agent");
+        query.setParameter("agent", agent);
+        return ((Long)query.uniqueResult()).intValue();
+    }
+
+    /** Pages through the addresses of the given agent's contact; not supported in a prior (audit) view. */
+    @Override
+    public List<Address> getAddresses(AgentBase agent, Integer pageSize,Integer pageNumber) {
+        checkNotInPriorView("AgentDaoImpl.getAddresses(AgentBase agent, Integer pageSize,Integer pageNumber)");
+        Query query = getSession().createQuery("select address from AgentBase agent join agent.contact.addresses address where agent = :agent");
+        query.setParameter("agent", agent);
+        setPagingParameter(query, pageSize, pageNumber);
+        return query.list();
+    }
+
+
+    /** Returns uuid/id/nomenclaturalTitle tuples for all teams. */
+    @Override
+    public List<UuidAndTitleCache<Team>> getTeamUuidAndNomenclaturalTitle() {
+        List<UuidAndTitleCache<Team>> list = new ArrayList<UuidAndTitleCache<Team>>();
+        Session session = getSession();
+
+        // dtype discriminator restricts the AgentBase table to Team rows
+        Query query = session.createQuery("select uuid, id, nomenclaturalTitle from " + type.getSimpleName() + " where dtype = 'Team'");
+
+        @SuppressWarnings("unchecked")
+        List<Object[]> result = query.list();
+
+        for(Object[] object : result){
+            list.add(new UuidAndTitleCache<Team>(Team.class, (UUID) object[0], (Integer)object[1], (String) object[2]));
+        }
+
+        return list;
+    }
+
+    /** Returns uuid/id/titleCache tuples for all persons. */
+    @Override
+    public List<UuidAndTitleCache<Person>> getPersonUuidAndTitleCache() {
+        Query query = getSession().createQuery("select uuid, id, titleCache from " + type.getSimpleName() + " where dtype = 'Person'");
+        return getUuidAndTitleCache(query);
+    }
+
+    /** Returns uuid/id/titleCache tuples for all teams. */
+    @Override
+    public List<UuidAndTitleCache<Team>> getTeamUuidAndTitleCache() {
+        Query query = getSession().createQuery("select uuid, id, titleCache from " + type.getSimpleName() + " where dtype = 'Team'");
+        return getUuidAndTitleCache(query);
+    }
+
+    /** Returns uuid/id/titleCache tuples for all institutions. */
+    @Override
+    public List<UuidAndTitleCache<Institution>> getInstitutionUuidAndTitleCache() {
+        Query query = getSession().createQuery("select uuid, id, titleCache from " + type.getSimpleName() + " where dtype = 'Institution'");
+        return getUuidAndTitleCache(query);
+    }
+
+}
import org.apache.log4j.Logger;\r
import org.apache.lucene.search.Sort;\r
import org.apache.lucene.search.SortField;\r
-import org.apache.lucene.util.Version;\r
import org.hibernate.Criteria;\r
import org.hibernate.FlushMode;\r
import org.hibernate.HibernateException;\r
import org.springframework.stereotype.Repository;\r
import org.springframework.util.ReflectionUtils;\r
\r
-import eu.etaxonomy.cdm.config.Configuration;\r
import eu.etaxonomy.cdm.model.common.CdmBase;\r
import eu.etaxonomy.cdm.model.common.User;\r
import eu.etaxonomy.cdm.model.common.VersionableEntity;\r
\r
protected Class<T> type;\r
\r
- protected Version version = Configuration.luceneVersion;\r
+// protected Version version = Configuration.luceneVersion;\r
\r
@Autowired\r
// @Qualifier("defaultBeanInitializer")\r
public MergeResult<T> merge(T transientObject, boolean returnTransientEntity) throws DataAccessException {\r
Session session = getSession();\r
PostMergeEntityListener.addSession(session);\r
- MergeResult result = null;\r
+ MergeResult<T> result = null;\r
try {\r
@SuppressWarnings("unchecked")\r
T persistentObject = (T)session.merge(transientObject);\r
return persistentObject.getUuid();\r
}\r
\r
- @SuppressWarnings("unchecked")\r
@Override\r
public T findById(int id) throws DataAccessException {\r
return (T) getSession().get(type, id);\r
\r
protected void addOrder(FullTextQuery fullTextQuery, List<OrderHint> orderHints) {\r
//FIXME preliminary hardcoded type:\r
- int type = SortField.STRING;\r
+ SortField.Type type = SortField.Type.STRING;\r
\r
if(orderHints != null && !orderHints.isEmpty()) {\r
org.apache.lucene.search.Sort sort = new Sort();\r
\r
}\r
}\r
-
+\r
if(type != null) {
query.add(AuditEntity.relatedId("type").eq(type.getId()));
}
- query.addProjection(AuditEntity.id().count("id"));
+ query.addProjection(AuditEntity.id().count());
return ((Long)query.getSingleResult()).intValue();
}
}
} else {
AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(DefinedTermBase.class,auditEvent.getRevisionNumber());
query.add(AuditEntity.relatedId("kindOf").eq(kindOf.getId()));
- query.addProjection(AuditEntity.id().count("id"));
+ query.addProjection(AuditEntity.id().count());
return ((Long)query.getSingleResult()).intValue();
}
}
for(T t : partOf) {
AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(DefinedTermBase.class,auditEvent.getRevisionNumber());
query.add(AuditEntity.relatedId("partOf").eq(t.getId()));
- query.addProjection(AuditEntity.id().count("id"));
+ query.addProjection(AuditEntity.id().count());
count += ((Long)query.getSingleResult()).intValue();
}
return count;
return ((Long)query.uniqueResult()).intValue();
} else {
AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(type,auditEvent.getRevisionNumber());
- query.addProjection(AuditEntity.id().count("id"));
+ query.addProjection(AuditEntity.id().count());
query.add(AuditEntity.relatedId("vocabulary").eq(termVocabulary.getId()));
return ((Number)query.getSingleResult()).intValue();
}
} else {
AuditQuery query = null;
if(clazz == null) {
- query = getAuditReader().createQuery().forEntitiesAtRevision(type,auditEvent.getRevisionNumber());
+ query = getAuditReader().createQuery().forEntitiesAtRevision(type, auditEvent.getRevisionNumber());
} else {
- query = getAuditReader().createQuery().forEntitiesAtRevision(clazz,auditEvent.getRevisionNumber());
+ query = getAuditReader().createQuery().forEntitiesAtRevision(clazz, auditEvent.getRevisionNumber());
}
- query.addProjection(AuditEntity.id().count("id"));
+ query.addProjection(AuditEntity.id().count());
+
return ((Long)query.getSingleResult()).intValue();
}
}
-/**\r
-* Copyright (C) 2007 EDIT\r
-* European Distributed Institute of Taxonomy\r
-* http://www.e-taxonomy.eu\r
-*\r
-* The contents of this file are subject to the Mozilla Public License Version 1.1\r
-* See LICENSE.TXT at the top of this package for the full license terms.\r
-*/\r
-\r
-package eu.etaxonomy.cdm.persistence.dao.hibernate.description;\r
-\r
-import java.util.ArrayList;\r
-import java.util.HashMap;\r
-import java.util.HashSet;\r
-import java.util.List;\r
-import java.util.Map;\r
-import java.util.Set;\r
-import java.util.UUID;\r
-\r
-import org.apache.commons.lang.ArrayUtils;\r
-import org.apache.log4j.Logger;\r
-import org.hibernate.Criteria;\r
-import org.hibernate.Query;\r
-import org.hibernate.criterion.ProjectionList;\r
-import org.hibernate.criterion.Projections;\r
-import org.hibernate.criterion.Restrictions;\r
-import org.hibernate.envers.query.AuditEntity;\r
-import org.hibernate.envers.query.AuditQuery;\r
-import org.springframework.beans.factory.annotation.Qualifier;\r
-import org.springframework.stereotype.Repository;\r
-\r
-import eu.etaxonomy.cdm.model.common.DefinedTerm;\r
-import eu.etaxonomy.cdm.model.common.LSID;\r
-import eu.etaxonomy.cdm.model.common.MarkerType;\r
-import eu.etaxonomy.cdm.model.common.Representation;\r
-import eu.etaxonomy.cdm.model.description.CommonTaxonName;\r
-import eu.etaxonomy.cdm.model.description.DescriptionBase;\r
-import eu.etaxonomy.cdm.model.description.DescriptionElementBase;\r
-import eu.etaxonomy.cdm.model.description.Feature;\r
-import eu.etaxonomy.cdm.model.description.PresenceAbsenceTerm;\r
-import eu.etaxonomy.cdm.model.description.SpecimenDescription;\r
-import eu.etaxonomy.cdm.model.description.TaxonDescription;\r
-import eu.etaxonomy.cdm.model.description.TaxonNameDescription;\r
-import eu.etaxonomy.cdm.model.location.NamedArea;\r
-import eu.etaxonomy.cdm.model.media.Media;\r
-import eu.etaxonomy.cdm.model.name.TaxonNameBase;\r
-import eu.etaxonomy.cdm.model.taxon.Taxon;\r
-import eu.etaxonomy.cdm.model.view.AuditEvent;\r
-import eu.etaxonomy.cdm.persistence.dao.common.OperationNotSupportedInPriorViewException;\r
-import eu.etaxonomy.cdm.persistence.dao.description.IDescriptionDao;\r
-import eu.etaxonomy.cdm.persistence.dao.hibernate.common.IdentifiableDaoBase;\r
-import eu.etaxonomy.cdm.persistence.dto.TermDto;\r
-import eu.etaxonomy.cdm.persistence.query.MatchMode;\r
-import eu.etaxonomy.cdm.persistence.query.OrderHint;\r
-\r
-@Repository\r
-@Qualifier("descriptionDaoImpl")\r
-public class DescriptionDaoImpl extends IdentifiableDaoBase<DescriptionBase> implements IDescriptionDao{\r
-\r
- private static final Logger logger = Logger.getLogger(DescriptionDaoImpl.class);\r
-\r
- public DescriptionDaoImpl() {\r
- super(DescriptionBase.class);\r
- indexedClasses = new Class[3];\r
- indexedClasses[0] = TaxonDescription.class;\r
- indexedClasses[1] = TaxonNameDescription.class;\r
- indexedClasses[2] = SpecimenDescription.class;\r
- }\r
-\r
-// @Override //Override for testing\r
-// public DescriptionBase load(UUID uuid, List<String> propertyPaths){\r
-// DescriptionBase bean = findByUuid(uuid);\r
-// if(bean == null){\r
-// return bean;\r
-// }\r
-// defaultBeanInitializer.initialize(bean, propertyPaths);\r
-//\r
-// return bean;\r
-// }\r
-\r
- @Override\r
- public int countDescriptionByDistribution(Set<NamedArea> namedAreas, PresenceAbsenceTerm status) {\r
- checkNotInPriorView("DescriptionDaoImpl.countDescriptionByDistribution(Set<NamedArea> namedAreas, PresenceAbsenceTermBase status)");\r
- Query query = null;\r
-\r
- if(status == null) {\r
- query = getSession().createQuery("select count(distinct description) from TaxonDescription description left join description.descriptionElements element join element.area area where area in (:namedAreas)");\r
- } else {\r
- query = getSession().createQuery("select count(distinct description) from TaxonDescription description left join description.descriptionElements element join element.area area join element.status status where area in (:namedAreas) and status = :status");\r
- query.setParameter("status", status);\r
- }\r
- query.setParameterList("namedAreas", namedAreas);\r
-\r
- return ((Long)query.uniqueResult()).intValue();\r
- }\r
-\r
- @Override\r
- public int countDescriptionElements(DescriptionBase description, Set<Feature> features, Class<? extends DescriptionElementBase> clazz) {\r
- return countDescriptionElements(description, null, features, clazz);\r
- }\r
-\r
- @Override\r
- public int countDescriptionElements(DescriptionBase description, Class<? extends DescriptionBase> descriptionType,\r
- Set<Feature> features, Class<? extends DescriptionElementBase> clazz) {\r
- AuditEvent auditEvent = getAuditEventFromContext();\r
- if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {\r
- Criteria criteria = null;\r
- if(clazz == null) {\r
- criteria = getSession().createCriteria(DescriptionElementBase.class);\r
- } else {\r
- criteria = getSession().createCriteria(clazz);\r
- }\r
-\r
- if(description != null) {\r
- criteria.add(Restrictions.eq("inDescription", description));\r
- }\r
-\r
- if(descriptionType != null) {\r
- criteria.createAlias("inDescription", "d").add(Restrictions.eq("d.class", descriptionType));\r
- }\r
-\r
- if(features != null && !features.isEmpty()) {\r
- criteria.add(Restrictions.in("feature", features));\r
- }\r
-\r
- criteria.setProjection(Projections.rowCount());\r
-\r
- return ((Number)criteria.uniqueResult()).intValue();\r
- } else {\r
- if(features != null && !features.isEmpty()) {\r
- Integer count = 0;\r
- for(Feature f : features) {\r
- AuditQuery query = null;\r
- if(clazz == null) {\r
- query = getAuditReader().createQuery().forEntitiesAtRevision(DescriptionElementBase.class,auditEvent.getRevisionNumber());\r
- } else {\r
- query = getAuditReader().createQuery().forEntitiesAtRevision(clazz,auditEvent.getRevisionNumber());\r
- }\r
-\r
- if(description != null) {\r
- query.add(AuditEntity.relatedId("inDescription").eq(description.getId()));\r
- }\r
-\r
- if(descriptionType != null) {\r
- query.add(AuditEntity.property("inDescription.class").eq(descriptionType));\r
- }\r
-\r
- query.add(AuditEntity.relatedId("feature").eq(f.getId()));\r
- query.addProjection(AuditEntity.id().count("id"));\r
- count += ((Long)query.getSingleResult()).intValue();\r
- }\r
-\r
- return count;\r
- } else {\r
- AuditQuery query = null;\r
- if(clazz == null) {\r
- query = getAuditReader().createQuery().forEntitiesAtRevision(DescriptionElementBase.class,auditEvent.getRevisionNumber());\r
- } else {\r
- query = getAuditReader().createQuery().forEntitiesAtRevision(clazz,auditEvent.getRevisionNumber());\r
- }\r
-\r
- if(description != null) {\r
- query.add(AuditEntity.relatedId("inDescription").eq(description.getId()));\r
- }\r
- if(descriptionType != null) {\r
- query.add(AuditEntity.property("inDescription.class").eq(descriptionType));\r
- }\r
-\r
- query.addProjection(AuditEntity.id().count("id"));\r
- return ((Long)query.getSingleResult()).intValue();\r
- }\r
- }\r
- }\r
-\r
- @Override\r
- public int countDescriptions(Class<? extends DescriptionBase> clazz, Boolean hasImages, Boolean hasText, Set<Feature> features) {\r
- checkNotInPriorView("DescriptionDaoImpl.countDescriptions(Class<TYPE> type, Boolean hasImages, Boolean hasText, Set<Feature> features)");\r
- Criteria inner = null;\r
-\r
- if(clazz == null) {\r
- inner = getSession().createCriteria(type);\r
- } else {\r
- inner = getSession().createCriteria(clazz);\r
- }\r
-\r
- Criteria elementsCriteria = inner.createCriteria("descriptionElements");\r
- if(hasText != null) {\r
- if(hasText) {\r
- elementsCriteria.add(Restrictions.isNotEmpty("multilanguageText"));\r
- } else {\r
- elementsCriteria.add(Restrictions.isEmpty("multilanguageText"));\r
- }\r
- }\r
-\r
- if(hasImages != null) {\r
- if(hasImages) {\r
- elementsCriteria.add(Restrictions.isNotEmpty("media"));\r
- } else {\r
- elementsCriteria.add(Restrictions.isEmpty("media"));\r
- }\r
- }\r
-\r
- if(features != null && !features.isEmpty()) {\r
- elementsCriteria.add(Restrictions.in("feature", features));\r
- }\r
-\r
- inner.setProjection(Projections.countDistinct("id"));\r
-\r
- return ((Number) inner.uniqueResult()).intValue();\r
- }\r
-\r
- @Override\r
- public int countTaxonDescriptions(Taxon taxon, Set<DefinedTerm> scopes,Set<NamedArea> geographicalScopes, Set<MarkerType> markerTypes) {\r
- AuditEvent auditEvent = getAuditEventFromContext();\r
- if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {\r
- Criteria criteria = getSession().createCriteria(TaxonDescription.class);\r
-\r
- if(taxon != null) {\r
- criteria.add(Restrictions.eq("taxon", taxon));\r
- }\r
-\r
- if(scopes != null && !scopes.isEmpty()) {\r
- Set<Integer> scopeIds = new HashSet<Integer>();\r
- for(DefinedTerm s : scopes) {\r
- scopeIds.add(s.getId());\r
- }\r
- criteria.createCriteria("scopes").add(Restrictions.in("id", scopeIds));\r
- }\r
-\r
- if(geographicalScopes != null && !geographicalScopes.isEmpty()) {\r
- Set<Integer> geoScopeIds = new HashSet<Integer>();\r
- for(NamedArea n : geographicalScopes) {\r
- geoScopeIds.add(n.getId());\r
- }\r
- criteria.createCriteria("geoScopes").add(Restrictions.in("id", geoScopeIds));\r
- }\r
-\r
-\r
- addMarkerTypesCriterion(markerTypes, criteria);\r
-\r
-\r
- criteria.setProjection(Projections.rowCount());\r
-\r
- return ((Number)criteria.uniqueResult()).intValue();\r
- } else {\r
- if((scopes == null || scopes.isEmpty())&& (geographicalScopes == null || geographicalScopes.isEmpty()) && (markerTypes == null || markerTypes.isEmpty())) {\r
- AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(TaxonDescription.class,auditEvent.getRevisionNumber());\r
- if(taxon != null) {\r
- query.add(AuditEntity.relatedId("taxon").eq(taxon.getId()));\r
- }\r
-\r
- query.addProjection(AuditEntity.id().count("id"));\r
-\r
- return ((Long)query.getSingleResult()).intValue();\r
- } else {\r
- throw new OperationNotSupportedInPriorViewException("countTaxonDescriptions(Taxon taxon, Set<Scope> scopes,Set<NamedArea> geographicalScopes)");\r
- }\r
- }\r
- }\r
-\r
- /**\r
- * @param markerTypes\r
- * @param criteria\r
- *\r
- */\r
- //TODO move to AnnotatableEntityDao(?)\r
- private void addMarkerTypesCriterion(Set<MarkerType> markerTypes, Criteria criteria) {\r
-\r
- if(markerTypes != null && !markerTypes.isEmpty()) {\r
- Set<Integer> markerTypeIds = new HashSet<Integer>();\r
- for(MarkerType markerType : markerTypes) {\r
- markerTypeIds.add(markerType.getId());\r
- }\r
- criteria.createCriteria("markers").add(Restrictions.eq("flag", true))\r
- .createAlias("markerType", "mt")\r
- .add(Restrictions.in("mt.id", markerTypeIds));\r
- } else if (markerTypes != null && markerTypes.isEmpty()){\r
- //AT: added in case the projects requires an third state description, An empty Marker type set\r
- }\r
- }\r
- @Override\r
- public List<DescriptionElementBase> getDescriptionElements(\r
- DescriptionBase description, Set<Feature> features,\r
- Class<? extends DescriptionElementBase> clazz, Integer pageSize, Integer pageNumber, List<String> propertyPaths) {\r
- return getDescriptionElements(description, null, features, clazz, pageSize, pageNumber, propertyPaths);\r
- }\r
-\r
- @Override\r
- public List<DescriptionElementBase> getDescriptionElements(\r
- DescriptionBase description, Class<? extends DescriptionBase> descriptionType,\r
- Set<Feature> features,\r
- Class<? extends DescriptionElementBase> clazz,\r
- Integer pageSize, Integer pageNumber, List<String> propertyPaths) {\r
-\r
- AuditEvent auditEvent = getAuditEventFromContext();\r
- if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {\r
- Criteria criteria = null;\r
- if(clazz == null) {\r
- criteria = getSession().createCriteria(DescriptionElementBase.class);\r
- } else {\r
- criteria = getSession().createCriteria(clazz);\r
- }\r
-\r
- if(description != null) {\r
- criteria.add(Restrictions.eq("inDescription", description));\r
- }\r
- if(descriptionType != null) {\r
- criteria.createAlias("inDescription", "d").add(Restrictions.eq("d.class", descriptionType));\r
- }\r
-\r
- if(features != null && !features.isEmpty()) {\r
- criteria.add(Restrictions.in("feature", features));\r
- }\r
-\r
- if(pageSize != null) {\r
- criteria.setMaxResults(pageSize);\r
- if(pageNumber != null) {\r
- criteria.setFirstResult(pageNumber * pageSize);\r
- }\r
- }\r
-\r
- List<DescriptionElementBase> results = criteria.list();\r
-\r
- defaultBeanInitializer.initializeAll(results, propertyPaths);\r
-\r
- return results;\r
- } else {\r
- List<DescriptionElementBase> result = new ArrayList<DescriptionElementBase>();\r
- if(features != null && !features.isEmpty()) {\r
-\r
- for(Feature f : features) {\r
- AuditQuery query = null;\r
- if(clazz == null) {\r
- query = getAuditReader().createQuery().forEntitiesAtRevision(DescriptionElementBase.class,auditEvent.getRevisionNumber());\r
- } else {\r
- query = getAuditReader().createQuery().forEntitiesAtRevision(clazz,auditEvent.getRevisionNumber());\r
- }\r
-\r
- if(description != null) {\r
- query.add(AuditEntity.relatedId("inDescription").eq(description.getId()));\r
- }\r
-\r
- if(descriptionType != null) {\r
- query.add(AuditEntity.property("inDescription.class").eq(descriptionType));\r
- }\r
-\r
- query.add(AuditEntity.relatedId("feature").eq(f.getId()));\r
- result.addAll(query.getResultList());\r
- }\r
- } else {\r
- AuditQuery query = null;\r
- if(clazz == null) {\r
- query = getAuditReader().createQuery().forEntitiesAtRevision(DescriptionElementBase.class,auditEvent.getRevisionNumber());\r
- } else {\r
- query = getAuditReader().createQuery().forEntitiesAtRevision(clazz,auditEvent.getRevisionNumber());\r
- }\r
-\r
- if(description != null) {\r
- query.add(AuditEntity.relatedId("inDescription").eq(description.getId()));\r
- }\r
-\r
- if(descriptionType != null) {\r
- query.add(AuditEntity.property("inDescription.class").eq(descriptionType));\r
- }\r
-\r
- result = query.getResultList();\r
- }\r
-\r
- defaultBeanInitializer.initializeAll(result, propertyPaths);\r
-\r
- return result;\r
- }\r
- }\r
-\r
- @Override\r
- public List<TaxonDescription> listTaxonDescriptions(Taxon taxon, Set<DefinedTerm> scopes, Set<NamedArea> geographicalScopes, Set<MarkerType> markerTypes, Integer pageSize, Integer pageNumber, List<String> propertyPaths) {\r
- AuditEvent auditEvent = getAuditEventFromContext();\r
- if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {\r
- Criteria criteria = getSession().createCriteria(TaxonDescription.class);\r
-\r
- if(taxon != null) {\r
- criteria.add(Restrictions.eq("taxon", taxon));\r
- }\r
-\r
- if(scopes != null && !scopes.isEmpty()) {\r
- Set<Integer> scopeIds = new HashSet<Integer>();\r
- for(DefinedTerm s : scopes) {\r
- scopeIds.add(s.getId());\r
- }\r
- criteria.createCriteria("scopes").add(Restrictions.in("id", scopeIds));\r
- }\r
-\r
- if(geographicalScopes != null && !geographicalScopes.isEmpty()) {\r
- Set<Integer> geoScopeIds = new HashSet<Integer>();\r
- for(NamedArea n : geographicalScopes) {\r
- geoScopeIds.add(n.getId());\r
- }\r
- criteria.createCriteria("geoScopes").add(Restrictions.in("id", geoScopeIds));\r
- }\r
-\r
- addMarkerTypesCriterion(markerTypes, criteria);\r
-\r
- if(pageSize != null) {\r
- criteria.setMaxResults(pageSize);\r
- if(pageNumber != null) {\r
- criteria.setFirstResult(pageNumber * pageSize);\r
- }\r
- }\r
-\r
- List<TaxonDescription> results = criteria.list();\r
-\r
- defaultBeanInitializer.initializeAll(results, propertyPaths);\r
-\r
- return results;\r
- } else {\r
- if((scopes == null || scopes.isEmpty())&& (geographicalScopes == null || geographicalScopes.isEmpty())&& (markerTypes == null || markerTypes.isEmpty())) {\r
- AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(TaxonDescription.class,auditEvent.getRevisionNumber());\r
- if(taxon != null) {\r
- query.add(AuditEntity.relatedId("taxon").eq(taxon.getId()));\r
- }\r
-\r
- if(pageSize != null) {\r
- query.setMaxResults(pageSize);\r
- if(pageNumber != null) {\r
- query.setFirstResult(pageNumber * pageSize);\r
- } else {\r
- query.setFirstResult(0);\r
- }\r
- }\r
-\r
- List<TaxonDescription> results = query.getResultList();\r
- defaultBeanInitializer.initializeAll(results, propertyPaths);\r
- return results;\r
- } else {\r
- throw new OperationNotSupportedInPriorViewException("countTaxonDescriptions(Taxon taxon, Set<Scope> scopes,Set<NamedArea> geographicalScopes)");\r
- }\r
- }\r
- }\r
-\r
- @Override\r
- public List<TaxonNameDescription> getTaxonNameDescriptions(TaxonNameBase name, Integer pageSize, Integer pageNumber, List<String> propertyPaths) {\r
- AuditEvent auditEvent = getAuditEventFromContext();\r
- if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {\r
- Criteria criteria = getSession().createCriteria(TaxonNameDescription.class);\r
-\r
- if(name != null) {\r
- criteria.add(Restrictions.eq("taxonName", name));\r
- }\r
-\r
- if(pageSize != null) {\r
- criteria.setMaxResults(pageSize);\r
- if(pageNumber != null) {\r
- criteria.setFirstResult(pageNumber * pageSize);\r
- }\r
- }\r
-\r
- List<TaxonNameDescription> results = criteria.list();\r
-\r
- defaultBeanInitializer.initializeAll(results, propertyPaths);\r
-\r
- return results;\r
- } else {\r
- AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(TaxonNameDescription.class,auditEvent.getRevisionNumber());\r
-\r
- if(name != null) {\r
- query.add(AuditEntity.relatedId("taxonName").eq(name.getId()));\r
- }\r
-\r
- if(pageSize != null) {\r
- query.setMaxResults(pageSize);\r
- if(pageNumber != null) {\r
- query.setFirstResult(pageNumber * pageSize);\r
- }\r
- }\r
-\r
- List<TaxonNameDescription> results = query.getResultList();\r
-\r
- defaultBeanInitializer.initializeAll(results, propertyPaths);\r
-\r
- return results;\r
- }\r
-\r
- }\r
-\r
- @Override\r
- public int countTaxonNameDescriptions(TaxonNameBase name) {\r
- AuditEvent auditEvent = getAuditEventFromContext();\r
- if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {\r
- Criteria criteria = getSession().createCriteria(TaxonNameDescription.class);\r
-\r
- if(name != null) {\r
- criteria.add(Restrictions.eq("taxonName", name));\r
- }\r
-\r
- criteria.setProjection(Projections.rowCount());\r
-\r
- return ((Number)criteria.uniqueResult()).intValue();\r
- } else {\r
- AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(TaxonNameDescription.class,auditEvent.getRevisionNumber());\r
-\r
- if(name != null) {\r
- query.add(AuditEntity.relatedId("taxonName").eq(name.getId()));\r
- }\r
-\r
- query.addProjection(AuditEntity.id().count("id"));\r
- return ((Long)query.getSingleResult()).intValue();\r
- }\r
- }\r
-\r
- /**\r
- * Should use a DetachedCriteria & subquery, but HHH-158 prevents this, for now.\r
- *\r
- * e.g. DetachedCriteria inner = DestachedCriteria.forClass(type);\r
- *\r
- * outer.add(Subqueries.propertyIn("id", inner));\r
- */\r
- @Override\r
- public List<DescriptionBase> listDescriptions(Class<? extends DescriptionBase> clazz, Boolean hasImages, Boolean hasText, Set<Feature> features, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {\r
- checkNotInPriorView("DescriptionDaoImpl.listDescriptions(Class<TYPE> type, Boolean hasImages, Boolean hasText, Set<Feature> features, Integer pageSize, Integer pageNumber)");\r
- Criteria inner = null;\r
-\r
- if(clazz == null) {\r
- inner = getSession().createCriteria(type);\r
- } else {\r
- inner = getSession().createCriteria(clazz);\r
- }\r
-\r
- Criteria elementsCriteria = inner.createCriteria("descriptionElements");\r
- if(hasText != null) {\r
- if(hasText) {\r
- elementsCriteria.add(Restrictions.isNotEmpty("multilanguageText"));\r
- } else {\r
- elementsCriteria.add(Restrictions.isEmpty("multilanguageText"));\r
- }\r
- }\r
-\r
- if(hasImages != null) {\r
- if(hasImages) {\r
- elementsCriteria.add(Restrictions.isNotEmpty("media"));\r
- } else {\r
- elementsCriteria.add(Restrictions.isEmpty("media"));\r
- }\r
- }\r
-\r
- if(features != null && !features.isEmpty()) {\r
- elementsCriteria.add(Restrictions.in("feature", features));\r
- }\r
-\r
- inner.setProjection(Projections.distinct(Projections.id()));\r
-\r
- List<Object> intermediateResult = inner.list();\r
-\r
- if(intermediateResult.isEmpty()) {\r
- return new ArrayList<DescriptionBase>();\r
- }\r
-\r
- Integer[] resultIds = new Integer[intermediateResult.size()];\r
- for(int i = 0; i < resultIds.length; i++) {\r
- resultIds[i] = ((Number)intermediateResult.get(i)).intValue();\r
- }\r
-\r
- Criteria outer = null;\r
-\r
- if(clazz == null) {\r
- outer = getSession().createCriteria(type);\r
- } else {\r
- outer = getSession().createCriteria(clazz);\r
- }\r
-\r
- outer.add(Restrictions.in("id", resultIds));\r
- addOrder(outer, orderHints);\r
-\r
- if(pageSize != null) {\r
- outer.setMaxResults(pageSize);\r
- if(pageNumber != null) {\r
- outer.setFirstResult(pageNumber * pageSize);\r
- }\r
- }\r
-\r
- List<DescriptionBase> results = outer.list();\r
- defaultBeanInitializer.initializeAll(results, propertyPaths);\r
- return results;\r
- }\r
-\r
- @Override\r
- public List<TaxonDescription> searchDescriptionByDistribution(Set<NamedArea> namedAreas, PresenceAbsenceTerm status, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {\r
- checkNotInPriorView("DescriptionDaoImpl.searchDescriptionByDistribution(Set<NamedArea> namedAreas, PresenceAbsenceTermBase status, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths)");\r
-\r
- Criteria criteria = getSession().createCriteria(TaxonDescription.class);\r
- Criteria elements = criteria.createCriteria("descriptionElements", "descriptionElement", Criteria.LEFT_JOIN);\r
- elements.add(Restrictions.in("area", namedAreas.toArray()));\r
-\r
- if(status != null) {\r
- elements.add(Restrictions.eq("status", status));\r
- }\r
-\r
- ProjectionList projectionList = Projections.projectionList().add(Projections.id());\r
-\r
- if(orderHints != null && !orderHints.isEmpty()) {\r
- for(OrderHint orderHint : orderHints) {\r
- projectionList = projectionList.add(Projections.property(orderHint.getPropertyName()));\r
- }\r
- }\r
-\r
- criteria.setProjection(Projections.distinct(projectionList));\r
-\r
- if(pageSize != null) {\r
- criteria.setMaxResults(pageSize);\r
- if(pageNumber != null) {\r
- criteria.setFirstResult(pageNumber * pageSize);\r
- }\r
- }\r
-\r
- addOrder(criteria,orderHints);\r
-\r
- List<Object> intermediateResult = criteria.list();\r
-\r
- if(intermediateResult.isEmpty()) {\r
- return new ArrayList<TaxonDescription>();\r
- }\r
-\r
- Integer[] resultIds = new Integer[intermediateResult.size()];\r
- for(int i = 0; i < resultIds.length; i++) {\r
- if(orderHints == null || orderHints.isEmpty()) {\r
- resultIds[i] = ((Number)intermediateResult.get(i)).intValue();\r
- } else {\r
- resultIds[i] = ((Number)((Object[])intermediateResult.get(i))[0]).intValue();\r
- }\r
- }\r
-\r
- criteria = getSession().createCriteria(TaxonDescription.class);\r
- criteria.add(Restrictions.in("id", resultIds));\r
- addOrder(criteria,orderHints);\r
-\r
- List<TaxonDescription> results = criteria.list();\r
- defaultBeanInitializer.initializeAll(results, propertyPaths);\r
- return results;\r
- }\r
-\r
- @Override\r
- public List<CommonTaxonName> searchDescriptionByCommonName(String queryString, MatchMode matchMode, Integer pageSize, Integer pageNumber) {\r
-\r
- Criteria crit = getSession().createCriteria(CommonTaxonName.class);\r
- if (matchMode == MatchMode.EXACT) {\r
- crit.add(Restrictions.eq("name", matchMode.queryStringFrom(queryString)));\r
- } else {\r
- crit.add(Restrictions.ilike("name", matchMode.queryStringFrom(queryString)));\r
- }\r
-\r
- if(pageSize != null) {\r
- crit.setMaxResults(pageSize);\r
- if(pageNumber != null) {\r
- crit.setFirstResult(pageNumber * pageSize);\r
- }\r
- }\r
- List<CommonTaxonName> results = crit.list();\r
- return results;\r
- }\r
-\r
- @Override\r
- public Integer countDescriptionByCommonName(String queryString, MatchMode matchMode) {\r
- //TODO inprove performance\r
- List<CommonTaxonName> results = searchDescriptionByCommonName(queryString, matchMode, null, null);\r
- return results.size();\r
- }\r
-\r
- @Override\r
- public DescriptionBase find(LSID lsid) {\r
- DescriptionBase descriptionBase = super.find(lsid);\r
- if(descriptionBase != null) {\r
- List<String> propertyPaths = new ArrayList<String>();\r
- propertyPaths.add("createdBy");\r
- propertyPaths.add("updatedBy");\r
- propertyPaths.add("taxon");\r
- propertyPaths.add("taxonName");\r
- propertyPaths.add("descriptionElements");\r
- propertyPaths.add("descriptionElements.createdBy");\r
- propertyPaths.add("descriptionElements.updatedBy");\r
- propertyPaths.add("descriptionElements.feature");\r
- propertyPaths.add("descriptionElements.multilanguageText");\r
- propertyPaths.add("descriptionElements.multilanguageText.language");\r
- propertyPaths.add("descriptionElements.area");\r
- propertyPaths.add("descriptionElements.status");\r
- propertyPaths.add("descriptionElements.modifyingText");\r
- propertyPaths.add("descriptionElementsmodifyingText.language");\r
- propertyPaths.add("descriptionElements.modifiers");\r
-\r
- defaultBeanInitializer.initialize(descriptionBase, propertyPaths);\r
- }\r
- return descriptionBase;\r
- }\r
-\r
-\r
- @Override\r
- public <T extends DescriptionElementBase> List<T> getDescriptionElementForTaxon(\r
- UUID taxonUuid, Set<Feature> features,\r
- Class<T> type, Integer pageSize,\r
- Integer pageNumber, List<String> propertyPaths) {\r
-\r
-// Logger.getLogger("org.hibernate.SQL").setLevel(Level.TRACE);\r
-\r
- Query query = prepareGetDescriptionElementForTaxon(taxonUuid, features, type, pageSize, pageNumber, false);\r
-\r
- if (logger.isDebugEnabled()){logger.debug(" dao: get list ...");}\r
- @SuppressWarnings("unchecked")\r
- List<T> results = query.list();\r
- if (logger.isDebugEnabled()){logger.debug(" dao: initialize ...");}\r
- defaultBeanInitializer.initializeAll(results, propertyPaths);\r
- if (logger.isDebugEnabled()){logger.debug(" dao: initialize - DONE");}\r
-\r
-// Logger.getLogger("org.hibernate.SQL").setLevel(Level.WARN);\r
- return results;\r
- }\r
-\r
- @Override\r
- public <T extends DescriptionElementBase> long countDescriptionElementForTaxon(\r
- UUID taxonUuid, Set<Feature> features, Class<T> type) {\r
-\r
- Query query = prepareGetDescriptionElementForTaxon(taxonUuid, features, type, null, null, true);\r
-\r
- return (Long)query.uniqueResult();\r
- }\r
-\r
- /**\r
- * @param taxon\r
- * @param features\r
- * @param type\r
- * @param pageSize\r
- * @param pageNumber\r
- * @return\r
- */\r
- private <T extends DescriptionElementBase> Query prepareGetDescriptionElementForTaxon(UUID taxonUuid,\r
- Set<Feature> features, Class<T> type, Integer pageSize, Integer pageNumber, boolean asCountQuery) {\r
-\r
- String listOrCount;\r
- if(asCountQuery){\r
- listOrCount = "count(de)";\r
- } else {\r
- listOrCount = "de";\r
- }\r
-\r
- String queryString = "SELECT " + listOrCount + " FROM DescriptionElementBase AS de" +\r
- " LEFT JOIN de.inDescription AS d" +\r
- " LEFT JOIN d.taxon AS t" +\r
- " WHERE d.class = 'TaxonDescription' AND t.uuid = :taxon_uuid ";\r
-\r
- if(type != null){\r
- queryString += " and de.class = :type";\r
- }\r
- if (features != null && features.size() > 0){\r
- queryString += " and de.feature in (:features) ";\r
- }\r
-// System.out.println(queryString);\r
- Query query = getSession().createQuery(queryString);\r
-\r
- query.setParameter("taxon_uuid", taxonUuid);\r
- if(type != null){\r
- query.setParameter("type", type.getSimpleName());\r
- }\r
- if(features != null && features.size() > 0){\r
- query.setParameterList("features", features) ;\r
- }\r
-\r
- if(pageSize != null) {\r
- query.setMaxResults(pageSize);\r
- if(pageNumber != null) {\r
- query.setFirstResult(pageNumber * pageSize);\r
- }\r
- }\r
- return query;\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see eu.etaxonomy.cdm.persistence.dao.description.IDescriptionDao#listTaxonDescriptionMedia(java.util.UUID, java.lang.Boolean, java.util.Set, java.lang.Integer, java.lang.Integer, java.util.List)\r
- */\r
- @Override\r
- public List<Media> listTaxonDescriptionMedia(UUID taxonUuid,\r
- Boolean limitToGalleries, Set<MarkerType> markerTypes,\r
- Integer pageSize, Integer pageNumber, List<String> propertyPaths) {\r
-\r
- AuditEvent auditEvent = getAuditEventFromContext();\r
- if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {\r
- String queryString = " SELECT media " +\r
- getTaxonDescriptionMediaQueryString(\r
- taxonUuid, limitToGalleries, markerTypes);\r
- queryString +=\r
- " GROUP BY media "\r
-// " ORDER BY index(media) " //not functional\r
- ;\r
-\r
- Query query = getSession().createQuery(queryString);\r
-\r
- setTaxonDescriptionMediaParameters(query, taxonUuid, limitToGalleries, markerTypes);\r
-\r
-\r
-// addMarkerTypesCriterion(markerTypes, hql);\r
-\r
- setPagingParameter(query, pageSize, pageNumber);\r
-\r
- List<Media> results = query.list();\r
-\r
- defaultBeanInitializer.initializeAll(results, propertyPaths);\r
-\r
- return results;\r
- } else {\r
- throw new OperationNotSupportedInPriorViewException("countTaxonDescriptionMedia(UUID taxonUuid, boolean restrictToGalleries)");\r
- }\r
- }\r
-\r
-\r
- /* (non-Javadoc)\r
- * @see eu.etaxonomy.cdm.persistence.dao.description.IDescriptionDao#countTaxonDescriptionMedia(java.util.UUID, java.lang.Boolean, java.util.Set)\r
- */\r
- @Override\r
- public int countTaxonDescriptionMedia(UUID taxonUuid,\r
- Boolean limitToGalleries, Set<MarkerType> markerTypes) {\r
- AuditEvent auditEvent = getAuditEventFromContext();\r
- if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {\r
- String queryString = " SELECT count(DISTINCT media) " +\r
- getTaxonDescriptionMediaQueryString(\r
- taxonUuid, limitToGalleries, markerTypes);\r
-\r
- Query query = getSession().createQuery(queryString);\r
- setTaxonDescriptionMediaParameters(query, taxonUuid, limitToGalleries, markerTypes);\r
- return ((Long)query.uniqueResult()).intValue();\r
- }else{\r
- throw new OperationNotSupportedInPriorViewException("countTaxonDescriptionMedia(UUID taxonUuid)");\r
- }\r
-\r
- }\r
-\r
- private void setTaxonDescriptionMediaParameters(Query query, UUID taxonUuid, Boolean limitToGalleries, Set<MarkerType> markerTypes) {\r
- if(taxonUuid != null){\r
- query.setParameter("uuid", taxonUuid);\r
- }\r
-\r
- }\r
-\r
- /**\r
- * @param taxonUuid\r
- * @param restrictToGalleries\r
- * @param markerTypes\r
- * @return\r
- */\r
- private String getTaxonDescriptionMediaQueryString(UUID taxonUuid,\r
- Boolean restrictToGalleries, Set<MarkerType> markerTypes) {\r
- String fromQueryString =\r
- " FROM DescriptionElementBase as deb INNER JOIN " +\r
- " deb.inDescription as td "\r
- + " INNER JOIN td.taxon as t "\r
- + " JOIN deb.media as media "\r
- + " LEFT JOIN td.markers marker ";\r
-\r
- String whereQueryString = " WHERE (1=1) ";\r
- if (taxonUuid != null){\r
- whereQueryString += " AND t.uuid = :uuid ";\r
- }\r
- if (restrictToGalleries){\r
- whereQueryString += " AND td.imageGallery is true ";\r
- }\r
- if (markerTypes != null && !markerTypes.isEmpty()){\r
- whereQueryString += " AND (1=0";\r
- for (MarkerType markerType : markerTypes){\r
- whereQueryString += " OR ( marker.markerType.id = " + markerType.getId() + " AND marker.flag is true)";\r
-\r
- }\r
- whereQueryString += ") ";\r
- }\r
-\r
- return fromQueryString + whereQueryString;\r
-\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see eu.etaxonomy.cdm.persistence.dao.description.IDescriptionDao#listNamedAreasInUse(java.lang.Integer, java.lang.Integer, java.util.List)\r
- */\r
- @SuppressWarnings("unchecked")\r
- @Override\r
- public List<TermDto> listNamedAreasInUse(boolean includeAllParents, Integer pageSize, Integer pageNumber) {\r
-\r
-// Logger.getLogger("org.hibernate.SQL").setLevel(Level.TRACE);\r
-\r
- StringBuilder queryString = new StringBuilder(\r
- "SELECT DISTINCT a.id, a.partOf.id"\r
- + " FROM Distribution AS d JOIN d.area AS a");\r
- Query query = getSession().createQuery(queryString.toString());\r
-\r
- List<Object[]> areasInUse = query.list();\r
- List<Object[]> parentResults = new ArrayList<Object[]>();\r
-\r
- if(!areasInUse.isEmpty()) {\r
- Set<Object> allAreaIds = new HashSet<Object>(areasInUse.size());\r
-\r
- if(includeAllParents) {\r
- // find all parent nodes\r
- String allAreasQueryStr = "select a.id, a.partOf.id from NamedArea as a";\r
- query = getSession().createQuery(allAreasQueryStr);\r
- List<Object[]> allAreasResult = query.list();\r
- Map<Object, Object> allAreasMap = ArrayUtils.toMap(allAreasResult.toArray());\r
-\r
- Set<Object> parents = new HashSet<Object>();\r
-\r
- for(Object[] leaf : areasInUse) {\r
- allAreaIds.add(leaf[0]);\r
- Object parentId = leaf[1];\r
- while (parentId != null) {\r
- if(parents.contains(parentId)) {\r
- // break if the parent already is in the set\r
- break;\r
- }\r
- parents.add(parentId);\r
- parentId = allAreasMap.get(parentId);\r
- }\r
- }\r
- allAreaIds.addAll(parents);\r
- } else {\r
- // only add the ids found so far\r
- for(Object[] leaf : areasInUse) {\r
- allAreaIds.add(leaf[0]);\r
- }\r
- }\r
-\r
-\r
- // NOTE can't use "select new TermDto(distinct a.uuid, r , a.vocabulary.uuid) since we will get multiple\r
- // rows for a term with multiple representations\r
- String parentAreasQueryStr = "select a.uuid, r, p.uuid, v.uuid "\r
- + "from NamedArea as a LEFT JOIN a.partOf as p LEFT JOIN a.representations AS r LEFT JOIN a.vocabulary as v "\r
- + "where a.id in (:allAreaIds) order by a.idInVocabulary";\r
- query = getSession().createQuery(parentAreasQueryStr);\r
- query.setParameterList("allAreaIds", allAreaIds);\r
- if(pageSize != null) {\r
- query.setMaxResults(pageSize);\r
- if(pageNumber != null) {\r
- query.setFirstResult(pageNumber * pageSize);\r
- }\r
- }\r
- parentResults = query.list();\r
- }\r
- List<TermDto> dtoList = termDtoListFrom(parentResults);\r
-\r
- return dtoList;\r
- }\r
-\r
- /**\r
- * @param results\r
- * @return\r
- */\r
- private List<TermDto> termDtoListFrom(List<Object[]> results) {\r
- Map<UUID, TermDto> dtoMap = new HashMap<UUID, TermDto>(results.size());\r
- for (Object[] elements : results) {\r
- UUID uuid = (UUID)elements[0];\r
- if(dtoMap.containsKey(uuid)){\r
- dtoMap.get(uuid).addRepresentation((Representation)elements[1]);\r
- } else {\r
- Set<Representation> representations;\r
- if(elements[1] instanceof Representation) {\r
- representations = new HashSet<Representation>(1);\r
- representations.add((Representation)elements[1]);\r
- } else {\r
- representations = (Set<Representation>)elements[1];\r
- }\r
- dtoMap.put(uuid, new TermDto(uuid, representations, (UUID)elements[2], (UUID)elements[3]));\r
- }\r
- }\r
- return new ArrayList<TermDto>(dtoMap.values());\r
- }\r
-\r
-\r
-\r
-}\r
+/**
+* Copyright (C) 2007 EDIT
+* European Distributed Institute of Taxonomy
+* http://www.e-taxonomy.eu
+*
+* The contents of this file are subject to the Mozilla Public License Version 1.1
+* See LICENSE.TXT at the top of this package for the full license terms.
+*/
+
+package eu.etaxonomy.cdm.persistence.dao.hibernate.description;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.UUID;
+
+import org.apache.commons.lang.ArrayUtils;
+import org.apache.log4j.Logger;
+import org.hibernate.Criteria;
+import org.hibernate.Query;
+import org.hibernate.criterion.ProjectionList;
+import org.hibernate.criterion.Projections;
+import org.hibernate.criterion.Restrictions;
+import org.hibernate.envers.query.AuditEntity;
+import org.hibernate.envers.query.AuditQuery;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.stereotype.Repository;
+
+import eu.etaxonomy.cdm.model.common.DefinedTerm;
+import eu.etaxonomy.cdm.model.common.LSID;
+import eu.etaxonomy.cdm.model.common.MarkerType;
+import eu.etaxonomy.cdm.model.common.Representation;
+import eu.etaxonomy.cdm.model.description.CommonTaxonName;
+import eu.etaxonomy.cdm.model.description.DescriptionBase;
+import eu.etaxonomy.cdm.model.description.DescriptionElementBase;
+import eu.etaxonomy.cdm.model.description.Feature;
+import eu.etaxonomy.cdm.model.description.PresenceAbsenceTerm;
+import eu.etaxonomy.cdm.model.description.SpecimenDescription;
+import eu.etaxonomy.cdm.model.description.TaxonDescription;
+import eu.etaxonomy.cdm.model.description.TaxonNameDescription;
+import eu.etaxonomy.cdm.model.location.NamedArea;
+import eu.etaxonomy.cdm.model.media.Media;
+import eu.etaxonomy.cdm.model.name.TaxonNameBase;
+import eu.etaxonomy.cdm.model.taxon.Taxon;
+import eu.etaxonomy.cdm.model.view.AuditEvent;
+import eu.etaxonomy.cdm.persistence.dao.common.OperationNotSupportedInPriorViewException;
+import eu.etaxonomy.cdm.persistence.dao.description.IDescriptionDao;
+import eu.etaxonomy.cdm.persistence.dao.hibernate.common.IdentifiableDaoBase;
+import eu.etaxonomy.cdm.persistence.dto.TermDto;
+import eu.etaxonomy.cdm.persistence.query.MatchMode;
+import eu.etaxonomy.cdm.persistence.query.OrderHint;
+
+@Repository
+@Qualifier("descriptionDaoImpl")
+public class DescriptionDaoImpl extends IdentifiableDaoBase<DescriptionBase> implements IDescriptionDao{
+
+ private static final Logger logger = Logger.getLogger(DescriptionDaoImpl.class);
+
+ /**
+ * Creates a DAO handling {@link DescriptionBase} entities and registers the
+ * three concrete description subclasses in {@code indexedClasses}
+ * (presumably consumed by the free-text indexing infrastructure of the
+ * superclass — TODO confirm against IdentifiableDaoBase).
+ */
+ public DescriptionDaoImpl() {
+ super(DescriptionBase.class);
+ // concrete subclasses of DescriptionBase known to this DAO
+ indexedClasses = new Class[3];
+ indexedClasses[0] = TaxonDescription.class;
+ indexedClasses[1] = TaxonNameDescription.class;
+ indexedClasses[2] = SpecimenDescription.class;
+ }
+
+// @Override //Override for testing
+// public DescriptionBase load(UUID uuid, List<String> propertyPaths){
+// DescriptionBase bean = findByUuid(uuid);
+// if(bean == null){
+// return bean;
+// }
+// defaultBeanInitializer.initialize(bean, propertyPaths);
+//
+// return bean;
+// }
+
+ /**
+ * Counts distinct {@link TaxonDescription}s that contain at least one
+ * distribution element whose area is in {@code namedAreas}, optionally
+ * restricted to elements carrying the given {@code status}.
+ * <p>
+ * Only supported in the current view; {@code checkNotInPriorView} rejects
+ * audited (prior-revision) contexts.
+ * <p>
+ * NOTE(review): an empty {@code namedAreas} set would yield an invalid
+ * {@code in ()} clause — presumably callers guarantee a non-empty set;
+ * confirm.
+ */
+ @Override
+ public int countDescriptionByDistribution(Set<NamedArea> namedAreas, PresenceAbsenceTerm status) {
+ checkNotInPriorView("DescriptionDaoImpl.countDescriptionByDistribution(Set<NamedArea> namedAreas, PresenceAbsenceTermBase status)");
+ Query query = null;
+
+ // Two HQL variants: with and without the additional status join/filter.
+ if(status == null) {
+ query = getSession().createQuery("select count(distinct description) from TaxonDescription description left join description.descriptionElements element join element.area area where area in (:namedAreas)");
+ } else {
+ query = getSession().createQuery("select count(distinct description) from TaxonDescription description left join description.descriptionElements element join element.area area join element.status status where area in (:namedAreas) and status = :status");
+ query.setParameter("status", status);
+ }
+ query.setParameterList("namedAreas", namedAreas);
+
+ return ((Long)query.uniqueResult()).intValue();
+ }
+
+ /**
+ * Convenience overload: counts description elements without restricting the
+ * owning description's type. Delegates with {@code descriptionType == null}.
+ */
+ @Override
+ public int countDescriptionElements(DescriptionBase description, Set<Feature> features, Class<? extends DescriptionElementBase> clazz) {
+ return countDescriptionElements(description, null, features, clazz);
+ }
+
+ /**
+ * Counts {@link DescriptionElementBase} instances, optionally filtered by the
+ * owning description, the owning description's concrete type, a set of
+ * features, and the element's own concrete type ({@code clazz}).
+ * <p>
+ * Current view: a single Criteria query with a row-count projection.
+ * Prior (audited) view: Hibernate Envers {@link AuditQuery} at the revision
+ * taken from the audit context; when features are given, one audit query is
+ * issued per feature and the counts are summed (presumably because the
+ * Envers relatedId API offers no "in" restriction — TODO confirm).
+ */
+ @Override
+ public int countDescriptionElements(DescriptionBase description, Class<? extends DescriptionBase> descriptionType,
+ Set<Feature> features, Class<? extends DescriptionElementBase> clazz) {
+ AuditEvent auditEvent = getAuditEventFromContext();
+ if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {
+ // --- current view: plain Criteria query ---
+ Criteria criteria = null;
+ if(clazz == null) {
+ criteria = getSession().createCriteria(DescriptionElementBase.class);
+ } else {
+ criteria = getSession().createCriteria(clazz);
+ }
+
+ if(description != null) {
+ criteria.add(Restrictions.eq("inDescription", description));
+ }
+
+ if(descriptionType != null) {
+ // restrict by the discriminator of the owning description
+ criteria.createAlias("inDescription", "d").add(Restrictions.eq("d.class", descriptionType));
+ }
+
+ if(features != null && !features.isEmpty()) {
+ criteria.add(Restrictions.in("feature", features));
+ }
+
+ criteria.setProjection(Projections.rowCount());
+
+ return ((Number)criteria.uniqueResult()).intValue();
+ } else {
+ // --- prior view: Envers audit queries at the context's revision ---
+ if(features != null && !features.isEmpty()) {
+ // one audit query per feature; counts are accumulated
+ Integer count = 0;
+ for(Feature f : features) {
+ AuditQuery query = null;
+ if(clazz == null) {
+ query = getAuditReader().createQuery().forEntitiesAtRevision(DescriptionElementBase.class,auditEvent.getRevisionNumber());
+ } else {
+ query = getAuditReader().createQuery().forEntitiesAtRevision(clazz,auditEvent.getRevisionNumber());
+ }
+
+ if(description != null) {
+ query.add(AuditEntity.relatedId("inDescription").eq(description.getId()));
+ }
+
+ if(descriptionType != null) {
+ query.add(AuditEntity.property("inDescription.class").eq(descriptionType));
+ }
+
+ query.add(AuditEntity.relatedId("feature").eq(f.getId()));
+ query.addProjection(AuditEntity.id().count());
+ count += ((Long)query.getSingleResult()).intValue();
+ }
+
+ return count;
+ } else {
+ // no feature filter: a single audit count query
+ AuditQuery query = null;
+ if(clazz == null) {
+ query = getAuditReader().createQuery().forEntitiesAtRevision(DescriptionElementBase.class,auditEvent.getRevisionNumber());
+ } else {
+ query = getAuditReader().createQuery().forEntitiesAtRevision(clazz,auditEvent.getRevisionNumber());
+ }
+
+ if(description != null) {
+ query.add(AuditEntity.relatedId("inDescription").eq(description.getId()));
+ }
+ if(descriptionType != null) {
+ query.add(AuditEntity.property("inDescription.class").eq(descriptionType));
+ }
+
+ query.addProjection(AuditEntity.id().count());
+ return ((Long)query.getSingleResult()).intValue();
+ }
+ }
+ }
+
+ /**
+ * Counts descriptions of the given type ({@code clazz}, falling back to the
+ * DAO's base type when {@code null}) whose elements match the optional
+ * text/image presence flags and feature set.
+ * <p>
+ * Only supported in the current view. The inner join on
+ * {@code descriptionElements} can multiply rows per description, hence the
+ * {@code countDistinct("id")} projection.
+ */
+ @Override
+ public int countDescriptions(Class<? extends DescriptionBase> clazz, Boolean hasImages, Boolean hasText, Set<Feature> features) {
+ checkNotInPriorView("DescriptionDaoImpl.countDescriptions(Class<TYPE> type, Boolean hasImages, Boolean hasText, Set<Feature> features)");
+ Criteria inner = null;
+
+ // 'type' is inherited from the superclass — presumably DescriptionBase.class
+ // as passed in the constructor; confirm against IdentifiableDaoBase.
+ if(clazz == null) {
+ inner = getSession().createCriteria(type);
+ } else {
+ inner = getSession().createCriteria(clazz);
+ }
+
+ Criteria elementsCriteria = inner.createCriteria("descriptionElements");
+ // tri-state flags: null = don't care, true = must have, false = must not have
+ if(hasText != null) {
+ if(hasText) {
+ elementsCriteria.add(Restrictions.isNotEmpty("multilanguageText"));
+ } else {
+ elementsCriteria.add(Restrictions.isEmpty("multilanguageText"));
+ }
+ }
+
+ if(hasImages != null) {
+ if(hasImages) {
+ elementsCriteria.add(Restrictions.isNotEmpty("media"));
+ } else {
+ elementsCriteria.add(Restrictions.isEmpty("media"));
+ }
+ }
+
+ if(features != null && !features.isEmpty()) {
+ elementsCriteria.add(Restrictions.in("feature", features));
+ }
+
+ inner.setProjection(Projections.countDistinct("id"));
+
+ return ((Number) inner.uniqueResult()).intValue();
+ }
+
+ /**
+ * Counts {@link TaxonDescription}s, optionally filtered by taxon, scope
+ * terms, geographical scopes and marker types.
+ * <p>
+ * In a prior (audited) view only the taxon filter is supported; any other
+ * filter raises {@link OperationNotSupportedInPriorViewException}.
+ */
+ @Override
+ public int countTaxonDescriptions(Taxon taxon, Set<DefinedTerm> scopes,Set<NamedArea> geographicalScopes, Set<MarkerType> markerTypes) {
+ AuditEvent auditEvent = getAuditEventFromContext();
+ if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {
+ Criteria criteria = getSession().createCriteria(TaxonDescription.class);
+
+ if(taxon != null) {
+ criteria.add(Restrictions.eq("taxon", taxon));
+ }
+
+ // scopes and geoScopes are matched by id via sub-criteria
+ if(scopes != null && !scopes.isEmpty()) {
+ Set<Integer> scopeIds = new HashSet<Integer>();
+ for(DefinedTerm s : scopes) {
+ scopeIds.add(s.getId());
+ }
+ criteria.createCriteria("scopes").add(Restrictions.in("id", scopeIds));
+ }
+
+ if(geographicalScopes != null && !geographicalScopes.isEmpty()) {
+ Set<Integer> geoScopeIds = new HashSet<Integer>();
+ for(NamedArea n : geographicalScopes) {
+ geoScopeIds.add(n.getId());
+ }
+ criteria.createCriteria("geoScopes").add(Restrictions.in("id", geoScopeIds));
+ }
+
+
+ addMarkerTypesCriterion(markerTypes, criteria);
+
+
+ criteria.setProjection(Projections.rowCount());
+
+ return ((Number)criteria.uniqueResult()).intValue();
+ } else {
+ // prior view: only the plain by-taxon count is implementable via Envers
+ if((scopes == null || scopes.isEmpty())&& (geographicalScopes == null || geographicalScopes.isEmpty()) && (markerTypes == null || markerTypes.isEmpty())) {
+ AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(TaxonDescription.class,auditEvent.getRevisionNumber());
+ if(taxon != null) {
+ query.add(AuditEntity.relatedId("taxon").eq(taxon.getId()));
+ }
+
+ query.addProjection(AuditEntity.id().count());
+
+ return ((Long)query.getSingleResult()).intValue();
+ } else {
+ throw new OperationNotSupportedInPriorViewException("countTaxonDescriptions(Taxon taxon, Set<Scope> scopes,Set<NamedArea> geographicalScopes)");
+ }
+ }
+ }
+
+ /**
+ * Adds a marker-type restriction to the given criteria: only entities that
+ * carry a marker with {@code flag == true} whose marker type is one of
+ * {@code markerTypes} will match.
+ * <p>
+ * A {@code null} set means "no marker filter"; an empty (non-null) set is
+ * deliberately a no-op (see the AT comment below).
+ *
+ * @param markerTypes marker types to filter by; may be {@code null} or empty
+ * @param criteria the criteria to extend; modified in place
+ *
+ */
+ //TODO move to AnnotatableEntityDao(?)
+ private void addMarkerTypesCriterion(Set<MarkerType> markerTypes, Criteria criteria) {
+
+ if(markerTypes != null && !markerTypes.isEmpty()) {
+ // match markers by type id and require the marker flag to be set
+ Set<Integer> markerTypeIds = new HashSet<Integer>();
+ for(MarkerType markerType : markerTypes) {
+ markerTypeIds.add(markerType.getId());
+ }
+ criteria.createCriteria("markers").add(Restrictions.eq("flag", true))
+ .createAlias("markerType", "mt")
+ .add(Restrictions.in("mt.id", markerTypeIds));
+ } else if (markerTypes != null && markerTypes.isEmpty()){
+ //AT: added in case the projects requires an third state description, An empty Marker type set
+ }
+ }
+ /**
+ * Convenience overload: lists description elements without restricting the
+ * owning description's type. Delegates with {@code descriptionType == null}.
+ */
+ @Override
+ public List<DescriptionElementBase> getDescriptionElements(
+ DescriptionBase description, Set<Feature> features,
+ Class<? extends DescriptionElementBase> clazz, Integer pageSize, Integer pageNumber, List<String> propertyPaths) {
+ return getDescriptionElements(description, null, features, clazz, pageSize, pageNumber, propertyPaths);
+ }
+
+ /**
+ * Pages through description elements, optionally filtered by owning description,
+ * by concrete description subtype, by element subtype and by feature set.
+ * In the current view a Criteria query is used; in a prior (audited) view an
+ * Envers AuditQuery is built instead — one query per feature, since the audit
+ * API offers no IN restriction on related entities here.
+ * Results are bean-initialized along {@code propertyPaths} before returning.
+ */
+ @Override
+ public List<DescriptionElementBase> getDescriptionElements(
+ DescriptionBase description, Class<? extends DescriptionBase> descriptionType,
+ Set<Feature> features,
+ Class<? extends DescriptionElementBase> clazz,
+ Integer pageSize, Integer pageNumber, List<String> propertyPaths) {
+
+ AuditEvent auditEvent = getAuditEventFromContext();
+ if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {
+ // current view: plain Hibernate Criteria on the (sub)class table
+ Criteria criteria = null;
+ if(clazz == null) {
+ criteria = getSession().createCriteria(DescriptionElementBase.class);
+ } else {
+ criteria = getSession().createCriteria(clazz);
+ }
+
+ if(description != null) {
+ criteria.add(Restrictions.eq("inDescription", description));
+ }
+ if(descriptionType != null) {
+ // discriminator restriction on the owning description's subtype
+ criteria.createAlias("inDescription", "d").add(Restrictions.eq("d.class", descriptionType));
+ }
+
+ if(features != null && !features.isEmpty()) {
+ criteria.add(Restrictions.in("feature", features));
+ }
+
+ if(pageSize != null) {
+ criteria.setMaxResults(pageSize);
+ if(pageNumber != null) {
+ criteria.setFirstResult(pageNumber * pageSize);
+ }
+ }
+
+ List<DescriptionElementBase> results = criteria.list();
+
+ defaultBeanInitializer.initializeAll(results, propertyPaths);
+
+ return results;
+ } else {
+ // prior view: Envers audit queries; NOTE(review) pageSize/pageNumber are
+ // NOT applied in this branch — confirm whether paging was intentionally skipped
+ List<DescriptionElementBase> result = new ArrayList<DescriptionElementBase>();
+ if(features != null && !features.isEmpty()) {
+
+ // one audit query per feature, results accumulated
+ for(Feature f : features) {
+ AuditQuery query = null;
+ if(clazz == null) {
+ query = getAuditReader().createQuery().forEntitiesAtRevision(DescriptionElementBase.class,auditEvent.getRevisionNumber());
+ } else {
+ query = getAuditReader().createQuery().forEntitiesAtRevision(clazz,auditEvent.getRevisionNumber());
+ }
+
+ if(description != null) {
+ query.add(AuditEntity.relatedId("inDescription").eq(description.getId()));
+ }
+
+ if(descriptionType != null) {
+ query.add(AuditEntity.property("inDescription.class").eq(descriptionType));
+ }
+
+ query.add(AuditEntity.relatedId("feature").eq(f.getId()));
+ result.addAll(query.getResultList());
+ }
+ } else {
+ AuditQuery query = null;
+ if(clazz == null) {
+ query = getAuditReader().createQuery().forEntitiesAtRevision(DescriptionElementBase.class,auditEvent.getRevisionNumber());
+ } else {
+ query = getAuditReader().createQuery().forEntitiesAtRevision(clazz,auditEvent.getRevisionNumber());
+ }
+
+ if(description != null) {
+ query.add(AuditEntity.relatedId("inDescription").eq(description.getId()));
+ }
+
+ if(descriptionType != null) {
+ query.add(AuditEntity.property("inDescription.class").eq(descriptionType));
+ }
+
+ result = query.getResultList();
+ }
+
+ defaultBeanInitializer.initializeAll(result, propertyPaths);
+
+ return result;
+ }
+ }
+
+ /**
+ * Pages taxon descriptions filtered by taxon, scopes, geographical scopes and
+ * marker types. In a prior (audited) view only the taxon filter is supported;
+ * any scope/marker filter there raises OperationNotSupportedInPriorViewException.
+ */
+ @Override
+ public List<TaxonDescription> listTaxonDescriptions(Taxon taxon, Set<DefinedTerm> scopes, Set<NamedArea> geographicalScopes, Set<MarkerType> markerTypes, Integer pageSize, Integer pageNumber, List<String> propertyPaths) {
+ AuditEvent auditEvent = getAuditEventFromContext();
+ if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {
+ Criteria criteria = getSession().createCriteria(TaxonDescription.class);
+
+ if(taxon != null) {
+ criteria.add(Restrictions.eq("taxon", taxon));
+ }
+
+ if(scopes != null && !scopes.isEmpty()) {
+ // filter by scope term ids via subcriteria on the scopes collection
+ Set<Integer> scopeIds = new HashSet<Integer>();
+ for(DefinedTerm s : scopes) {
+ scopeIds.add(s.getId());
+ }
+ criteria.createCriteria("scopes").add(Restrictions.in("id", scopeIds));
+ }
+
+ if(geographicalScopes != null && !geographicalScopes.isEmpty()) {
+ // filter by geographic area ids via subcriteria on geoScopes
+ Set<Integer> geoScopeIds = new HashSet<Integer>();
+ for(NamedArea n : geographicalScopes) {
+ geoScopeIds.add(n.getId());
+ }
+ criteria.createCriteria("geoScopes").add(Restrictions.in("id", geoScopeIds));
+ }
+
+ addMarkerTypesCriterion(markerTypes, criteria);
+
+ if(pageSize != null) {
+ criteria.setMaxResults(pageSize);
+ if(pageNumber != null) {
+ criteria.setFirstResult(pageNumber * pageSize);
+ }
+ }
+
+ List<TaxonDescription> results = criteria.list();
+
+ defaultBeanInitializer.initializeAll(results, propertyPaths);
+
+ return results;
+ } else {
+ // prior view: only a bare taxon filter can be expressed via Envers
+ if((scopes == null || scopes.isEmpty())&& (geographicalScopes == null || geographicalScopes.isEmpty())&& (markerTypes == null || markerTypes.isEmpty())) {
+ AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(TaxonDescription.class,auditEvent.getRevisionNumber());
+ if(taxon != null) {
+ query.add(AuditEntity.relatedId("taxon").eq(taxon.getId()));
+ }
+
+ if(pageSize != null) {
+ query.setMaxResults(pageSize);
+ if(pageNumber != null) {
+ query.setFirstResult(pageNumber * pageSize);
+ } else {
+ query.setFirstResult(0);
+ }
+ }
+
+ List<TaxonDescription> results = query.getResultList();
+ defaultBeanInitializer.initializeAll(results, propertyPaths);
+ return results;
+ } else {
+ // NOTE(review): exception message names countTaxonDescriptions although
+ // this is listTaxonDescriptions — message appears copy-pasted
+ throw new OperationNotSupportedInPriorViewException("countTaxonDescriptions(Taxon taxon, Set<Scope> scopes,Set<NamedArea> geographicalScopes)");
+ }
+ }
+ }
+
+ /**
+ * Pages descriptions attached to a taxon name; {@code name == null} lists all.
+ * Works both in the current view (Criteria) and in a prior audited view
+ * (Envers AuditQuery). Results are initialized along {@code propertyPaths}.
+ */
+ @Override
+ public List<TaxonNameDescription> getTaxonNameDescriptions(TaxonNameBase name, Integer pageSize, Integer pageNumber, List<String> propertyPaths) {
+ AuditEvent auditEvent = getAuditEventFromContext();
+ if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {
+ Criteria criteria = getSession().createCriteria(TaxonNameDescription.class);
+
+ if(name != null) {
+ criteria.add(Restrictions.eq("taxonName", name));
+ }
+
+ if(pageSize != null) {
+ criteria.setMaxResults(pageSize);
+ if(pageNumber != null) {
+ criteria.setFirstResult(pageNumber * pageSize);
+ }
+ }
+
+ List<TaxonNameDescription> results = criteria.list();
+
+ defaultBeanInitializer.initializeAll(results, propertyPaths);
+
+ return results;
+ } else {
+ AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(TaxonNameDescription.class,auditEvent.getRevisionNumber());
+
+ if(name != null) {
+ query.add(AuditEntity.relatedId("taxonName").eq(name.getId()));
+ }
+
+ if(pageSize != null) {
+ query.setMaxResults(pageSize);
+ if(pageNumber != null) {
+ query.setFirstResult(pageNumber * pageSize);
+ }
+ }
+
+ List<TaxonNameDescription> results = query.getResultList();
+
+ defaultBeanInitializer.initializeAll(results, propertyPaths);
+
+ return results;
+ }
+
+ }
+
+ /**
+ * Counts descriptions attached to the given taxon name ({@code null} counts
+ * all), in either the current or a prior audited view.
+ */
+ @Override
+ public int countTaxonNameDescriptions(TaxonNameBase name) {
+ AuditEvent auditEvent = getAuditEventFromContext();
+ if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {
+ Criteria criteria = getSession().createCriteria(TaxonNameDescription.class);
+
+ if(name != null) {
+ criteria.add(Restrictions.eq("taxonName", name));
+ }
+
+ criteria.setProjection(Projections.rowCount());
+
+ return ((Number)criteria.uniqueResult()).intValue();
+ } else {
+ AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(TaxonNameDescription.class,auditEvent.getRevisionNumber());
+
+ if(name != null) {
+ query.add(AuditEntity.relatedId("taxonName").eq(name.getId()));
+ }
+
+ query.addProjection(AuditEntity.id().count());
+ return ((Long)query.getSingleResult()).intValue();
+ }
+ }
+
+ /**
+ * Lists descriptions whose elements match the given text/image/feature filters.
+ * Implemented as two queries: an inner id-projection query collecting matching
+ * description ids, then an outer query fetching, ordering and paging them.
+ *
+ * Should use a DetachedCriteria & subquery, but HHH-158 prevents this, for now.
+ *
+ * e.g. DetachedCriteria inner = DestachedCriteria.forClass(type);
+ *
+ * outer.add(Subqueries.propertyIn("id", inner));
+ */
+ @Override
+ public List<DescriptionBase> listDescriptions(Class<? extends DescriptionBase> clazz, Boolean hasImages, Boolean hasText, Set<Feature> features, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+ checkNotInPriorView("DescriptionDaoImpl.listDescriptions(Class<TYPE> type, Boolean hasImages, Boolean hasText, Set<Feature> features, Integer pageSize, Integer pageNumber)");
+ Criteria inner = null;
+
+ if(clazz == null) {
+ // NOTE(review): 'type' is not declared in this method — presumably an
+ // inherited field holding the DAO's base entity class; confirm
+ inner = getSession().createCriteria(type);
+ } else {
+ inner = getSession().createCriteria(clazz);
+ }
+
+ Criteria elementsCriteria = inner.createCriteria("descriptionElements");
+ if(hasText != null) {
+ if(hasText) {
+ elementsCriteria.add(Restrictions.isNotEmpty("multilanguageText"));
+ } else {
+ elementsCriteria.add(Restrictions.isEmpty("multilanguageText"));
+ }
+ }
+
+ if(hasImages != null) {
+ if(hasImages) {
+ elementsCriteria.add(Restrictions.isNotEmpty("media"));
+ } else {
+ elementsCriteria.add(Restrictions.isEmpty("media"));
+ }
+ }
+
+ if(features != null && !features.isEmpty()) {
+ elementsCriteria.add(Restrictions.in("feature", features));
+ }
+
+ // inner query yields just the distinct matching ids
+ inner.setProjection(Projections.distinct(Projections.id()));
+
+ List<Object> intermediateResult = inner.list();
+
+ if(intermediateResult.isEmpty()) {
+ return new ArrayList<DescriptionBase>();
+ }
+
+ Integer[] resultIds = new Integer[intermediateResult.size()];
+ for(int i = 0; i < resultIds.length; i++) {
+ resultIds[i] = ((Number)intermediateResult.get(i)).intValue();
+ }
+
+ // outer query re-fetches the full entities for those ids, ordered and paged
+ Criteria outer = null;
+
+ if(clazz == null) {
+ outer = getSession().createCriteria(type);
+ } else {
+ outer = getSession().createCriteria(clazz);
+ }
+
+ outer.add(Restrictions.in("id", resultIds));
+ addOrder(outer, orderHints);
+
+ if(pageSize != null) {
+ outer.setMaxResults(pageSize);
+ if(pageNumber != null) {
+ outer.setFirstResult(pageNumber * pageSize);
+ }
+ }
+
+ List<DescriptionBase> results = outer.list();
+ defaultBeanInitializer.initializeAll(results, propertyPaths);
+ return results;
+ }
+
+ /**
+ * Finds taxon descriptions having a Distribution element in any of the given
+ * areas (optionally with a given status). Two-phase like listDescriptions:
+ * first a distinct id projection (plus any order-by properties, which must be
+ * projected for DISTINCT to be valid), then a re-fetch of the full entities.
+ * Current view only — throws in a prior (audited) view.
+ */
+ @Override
+ public List<TaxonDescription> searchDescriptionByDistribution(Set<NamedArea> namedAreas, PresenceAbsenceTerm status, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+ checkNotInPriorView("DescriptionDaoImpl.searchDescriptionByDistribution(Set<NamedArea> namedAreas, PresenceAbsenceTermBase status, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths)");
+
+ Criteria criteria = getSession().createCriteria(TaxonDescription.class);
+ Criteria elements = criteria.createCriteria("descriptionElements", "descriptionElement", Criteria.LEFT_JOIN);
+ elements.add(Restrictions.in("area", namedAreas.toArray()));
+
+ if(status != null) {
+ elements.add(Restrictions.eq("status", status));
+ }
+
+ ProjectionList projectionList = Projections.projectionList().add(Projections.id());
+
+ if(orderHints != null && !orderHints.isEmpty()) {
+ // order-by columns must appear in the projection for DISTINCT
+ for(OrderHint orderHint : orderHints) {
+ projectionList = projectionList.add(Projections.property(orderHint.getPropertyName()));
+ }
+ }
+
+ criteria.setProjection(Projections.distinct(projectionList));
+
+ if(pageSize != null) {
+ criteria.setMaxResults(pageSize);
+ if(pageNumber != null) {
+ criteria.setFirstResult(pageNumber * pageSize);
+ }
+ }
+
+ addOrder(criteria,orderHints);
+
+ List<Object> intermediateResult = criteria.list();
+
+ if(intermediateResult.isEmpty()) {
+ return new ArrayList<TaxonDescription>();
+ }
+
+ // without order hints each row is a bare id; with hints it is an Object[]
+ // whose first element is the id
+ Integer[] resultIds = new Integer[intermediateResult.size()];
+ for(int i = 0; i < resultIds.length; i++) {
+ if(orderHints == null || orderHints.isEmpty()) {
+ resultIds[i] = ((Number)intermediateResult.get(i)).intValue();
+ } else {
+ resultIds[i] = ((Number)((Object[])intermediateResult.get(i))[0]).intValue();
+ }
+ }
+
+ criteria = getSession().createCriteria(TaxonDescription.class);
+ criteria.add(Restrictions.in("id", resultIds));
+ addOrder(criteria,orderHints);
+
+ List<TaxonDescription> results = criteria.list();
+ defaultBeanInitializer.initializeAll(results, propertyPaths);
+ return results;
+ }
+
+ /**
+ * Pages CommonTaxonName elements whose name matches {@code queryString} under
+ * the given match mode: exact equality for MatchMode.EXACT, otherwise a
+ * case-insensitive LIKE with the pattern produced by the match mode.
+ */
+ @Override
+ public List<CommonTaxonName> searchDescriptionByCommonName(String queryString, MatchMode matchMode, Integer pageSize, Integer pageNumber) {
+
+ Criteria crit = getSession().createCriteria(CommonTaxonName.class);
+ if (matchMode == MatchMode.EXACT) {
+ crit.add(Restrictions.eq("name", matchMode.queryStringFrom(queryString)));
+ } else {
+ crit.add(Restrictions.ilike("name", matchMode.queryStringFrom(queryString)));
+ }
+
+ if(pageSize != null) {
+ crit.setMaxResults(pageSize);
+ if(pageNumber != null) {
+ crit.setFirstResult(pageNumber * pageSize);
+ }
+ }
+ List<CommonTaxonName> results = crit.list();
+ return results;
+ }
+
+ /**
+ * Counts common-name matches for the query string.
+ */
+ @Override
+ public Integer countDescriptionByCommonName(String queryString, MatchMode matchMode) {
+ //TODO improve performance: loads all matching entities just to count them;
+ // a rowCount projection would avoid materializing the result list
+ List<CommonTaxonName> results = searchDescriptionByCommonName(queryString, matchMode, null, null);
+ return results.size();
+ }
+
+ /**
+ * Resolves a description by its LSID and, when found, eagerly initializes the
+ * object graph an LSID consumer needs (audit users, owning taxon/name, all
+ * elements with their feature, texts, areas, status, modifying text and
+ * modifiers). Returns {@code null} when no entity matches.
+ *
+ * @param lsid the Life Science Identifier to resolve
+ * @return the initialized description, or {@code null} if none found
+ */
+ @Override
+ public DescriptionBase find(LSID lsid) {
+ DescriptionBase descriptionBase = super.find(lsid);
+ if(descriptionBase != null) {
+ List<String> propertyPaths = new ArrayList<String>();
+ propertyPaths.add("createdBy");
+ propertyPaths.add("updatedBy");
+ propertyPaths.add("taxon");
+ propertyPaths.add("taxonName");
+ propertyPaths.add("descriptionElements");
+ propertyPaths.add("descriptionElements.createdBy");
+ propertyPaths.add("descriptionElements.updatedBy");
+ propertyPaths.add("descriptionElements.feature");
+ propertyPaths.add("descriptionElements.multilanguageText");
+ propertyPaths.add("descriptionElements.multilanguageText.language");
+ propertyPaths.add("descriptionElements.area");
+ propertyPaths.add("descriptionElements.status");
+ propertyPaths.add("descriptionElements.modifyingText");
+ // bugfix: path was "descriptionElementsmodifyingText.language" (missing
+ // dot separator), so this path was silently never initialized
+ propertyPaths.add("descriptionElements.modifyingText.language");
+ propertyPaths.add("descriptionElements.modifiers");
+
+ defaultBeanInitializer.initialize(descriptionBase, propertyPaths);
+ }
+ return descriptionBase;
+ }
+
+
+ /**
+ * Lists description elements belonging to TaxonDescriptions of the taxon with
+ * the given UUID, filtered by feature set and element subtype, paged. Builds
+ * the HQL via prepareGetDescriptionElementForTaxon (asCountQuery == false) and
+ * initializes the results along {@code propertyPaths}.
+ */
+ @Override
+ public <T extends DescriptionElementBase> List<T> getDescriptionElementForTaxon(
+ UUID taxonUuid, Set<Feature> features,
+ Class<T> type, Integer pageSize,
+ Integer pageNumber, List<String> propertyPaths) {
+
+// Logger.getLogger("org.hibernate.SQL").setLevel(Level.TRACE);
+
+ Query query = prepareGetDescriptionElementForTaxon(taxonUuid, features, type, pageSize, pageNumber, false);
+
+ if (logger.isDebugEnabled()){logger.debug(" dao: get list ...");}
+ @SuppressWarnings("unchecked")
+ List<T> results = query.list();
+ if (logger.isDebugEnabled()){logger.debug(" dao: initialize ...");}
+ defaultBeanInitializer.initializeAll(results, propertyPaths);
+ if (logger.isDebugEnabled()){logger.debug(" dao: initialize - DONE");}
+
+// Logger.getLogger("org.hibernate.SQL").setLevel(Level.WARN);
+ return results;
+ }
+
+ /**
+ * Counting counterpart of getDescriptionElementForTaxon: same filters, but the
+ * prepared HQL projects count(de) (asCountQuery == true) and returns a single
+ * Long, no paging.
+ */
+ @Override
+ public <T extends DescriptionElementBase> long countDescriptionElementForTaxon(
+ UUID taxonUuid, Set<Feature> features, Class<T> type) {
+
+ Query query = prepareGetDescriptionElementForTaxon(taxonUuid, features, type, null, null, true);
+
+ return (Long)query.uniqueResult();
+ }
+
+ /**
+ * Builds the HQL query shared by getDescriptionElementForTaxon and
+ * countDescriptionElementForTaxon: selects (or counts) DescriptionElementBase
+ * rows joined via their TaxonDescription to a taxon matched by UUID.
+ *
+ * @param taxonUuid uuid of the taxon whose description elements are wanted
+ * @param features optional feature filter (applied only when non-empty)
+ * @param type optional element subtype, matched by its simple class name
+ *        against the discriminator (de.class)
+ * @param pageSize page size; null disables paging
+ * @param pageNumber zero-based page index, only used when pageSize is set
+ * @param asCountQuery true to project count(de) instead of the entities
+ * @return the parameterized, optionally paged Hibernate query
+ */
+ private <T extends DescriptionElementBase> Query prepareGetDescriptionElementForTaxon(UUID taxonUuid,
+ Set<Feature> features, Class<T> type, Integer pageSize, Integer pageNumber, boolean asCountQuery) {
+
+ String listOrCount;
+ if(asCountQuery){
+ listOrCount = "count(de)";
+ } else {
+ listOrCount = "de";
+ }
+
+ String queryString = "SELECT " + listOrCount + " FROM DescriptionElementBase AS de" +
+ " LEFT JOIN de.inDescription AS d" +
+ " LEFT JOIN d.taxon AS t" +
+ " WHERE d.class = 'TaxonDescription' AND t.uuid = :taxon_uuid ";
+
+ if(type != null){
+ queryString += " and de.class = :type";
+ }
+ if (features != null && features.size() > 0){
+ queryString += " and de.feature in (:features) ";
+ }
+// System.out.println(queryString);
+ Query query = getSession().createQuery(queryString);
+
+ query.setParameter("taxon_uuid", taxonUuid);
+ if(type != null){
+ // discriminator values are the entities' simple class names
+ query.setParameter("type", type.getSimpleName());
+ }
+ if(features != null && features.size() > 0){
+ query.setParameterList("features", features) ;
+ }
+
+ if(pageSize != null) {
+ query.setMaxResults(pageSize);
+ if(pageNumber != null) {
+ query.setFirstResult(pageNumber * pageSize);
+ }
+ }
+ return query;
+ }
+
+ /* (non-Javadoc)
+ * @see eu.etaxonomy.cdm.persistence.dao.description.IDescriptionDao#listTaxonDescriptionMedia(java.util.UUID, java.lang.Boolean, java.util.Set, java.lang.Integer, java.lang.Integer, java.util.List)
+ */
+ /**
+ * Pages distinct Media attached to description elements of the given taxon's
+ * descriptions, optionally limited to image galleries and to descriptions
+ * carrying given marker types. Current view only — throws in a prior view.
+ * The marker-type filter is inlined into the HQL string by
+ * getTaxonDescriptionMediaQueryString, not bound as a parameter.
+ */
+ @Override
+ public List<Media> listTaxonDescriptionMedia(UUID taxonUuid,
+ Boolean limitToGalleries, Set<MarkerType> markerTypes,
+ Integer pageSize, Integer pageNumber, List<String> propertyPaths) {
+
+ AuditEvent auditEvent = getAuditEventFromContext();
+ if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {
+ String queryString = " SELECT media " +
+ getTaxonDescriptionMediaQueryString(
+ taxonUuid, limitToGalleries, markerTypes);
+ queryString +=
+ " GROUP BY media "
+// " ORDER BY index(media) " //not functional
+ ;
+
+ Query query = getSession().createQuery(queryString);
+
+ setTaxonDescriptionMediaParameters(query, taxonUuid, limitToGalleries, markerTypes);
+
+
+// addMarkerTypesCriterion(markerTypes, hql);
+
+ setPagingParameter(query, pageSize, pageNumber);
+
+ List<Media> results = query.list();
+
+ defaultBeanInitializer.initializeAll(results, propertyPaths);
+
+ return results;
+ } else {
+ throw new OperationNotSupportedInPriorViewException("countTaxonDescriptionMedia(UUID taxonUuid, boolean restrictToGalleries)");
+ }
+ }
+
+
+ /* (non-Javadoc)
+ * @see eu.etaxonomy.cdm.persistence.dao.description.IDescriptionDao#countTaxonDescriptionMedia(java.util.UUID, java.lang.Boolean, java.util.Set)
+ */
+ /**
+ * Counting counterpart of listTaxonDescriptionMedia: same FROM/WHERE string,
+ * projected as count(DISTINCT media). Current view only.
+ */
+ @Override
+ public int countTaxonDescriptionMedia(UUID taxonUuid,
+ Boolean limitToGalleries, Set<MarkerType> markerTypes) {
+ AuditEvent auditEvent = getAuditEventFromContext();
+ if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {
+ String queryString = " SELECT count(DISTINCT media) " +
+ getTaxonDescriptionMediaQueryString(
+ taxonUuid, limitToGalleries, markerTypes);
+
+ Query query = getSession().createQuery(queryString);
+ setTaxonDescriptionMediaParameters(query, taxonUuid, limitToGalleries, markerTypes);
+ return ((Long)query.uniqueResult()).intValue();
+ }else{
+ throw new OperationNotSupportedInPriorViewException("countTaxonDescriptionMedia(UUID taxonUuid)");
+ }
+
+ }
+
+ /**
+ * Binds the parameters of the media query built by
+ * getTaxonDescriptionMediaQueryString. Only :uuid is a bound parameter;
+ * limitToGalleries and markerTypes are unused here because those filters are
+ * inlined into the query string itself.
+ */
+ private void setTaxonDescriptionMediaParameters(Query query, UUID taxonUuid, Boolean limitToGalleries, Set<MarkerType> markerTypes) {
+ if(taxonUuid != null){
+ query.setParameter("uuid", taxonUuid);
+ }
+
+ }
+
+ /**
+ * Builds the shared FROM/WHERE HQL fragment for the taxon-description media
+ * queries (list and count). The taxon filter uses a :uuid bind parameter; the
+ * gallery and marker-type filters are inlined as literals (marker type ids are
+ * integers taken from persisted entities, so no injection risk).
+ *
+ * @param taxonUuid optional taxon filter; adds "t.uuid = :uuid" when non-null
+ * @param restrictToGalleries when Boolean.TRUE, restrict to image galleries;
+ *        null is treated the same as FALSE (no restriction)
+ * @param markerTypes optional marker-type filter (flag must be true)
+ * @return the HQL fragment starting at FROM
+ */
+ private String getTaxonDescriptionMediaQueryString(UUID taxonUuid,
+ Boolean restrictToGalleries, Set<MarkerType> markerTypes) {
+ String fromQueryString =
+ " FROM DescriptionElementBase as deb INNER JOIN " +
+ " deb.inDescription as td "
+ + " INNER JOIN td.taxon as t "
+ + " JOIN deb.media as media "
+ + " LEFT JOIN td.markers marker ";
+
+ String whereQueryString = " WHERE (1=1) ";
+ if (taxonUuid != null){
+ whereQueryString += " AND t.uuid = :uuid ";
+ }
+ // bugfix: direct unboxing of the nullable Boolean threw a
+ // NullPointerException when callers passed limitToGalleries == null
+ if (Boolean.TRUE.equals(restrictToGalleries)){
+ whereQueryString += " AND td.imageGallery is true ";
+ }
+ if (markerTypes != null && !markerTypes.isEmpty()){
+ whereQueryString += " AND (1=0";
+ for (MarkerType markerType : markerTypes){
+ whereQueryString += " OR ( marker.markerType.id = " + markerType.getId() + " AND marker.flag is true)";
+
+ }
+ whereQueryString += ") ";
+ }
+
+ return fromQueryString + whereQueryString;
+
+ }
+
+ /* (non-Javadoc)
+ * @see eu.etaxonomy.cdm.persistence.dao.description.IDescriptionDao#listNamedAreasInUse(java.lang.Integer, java.lang.Integer, java.util.List)
+ */
+ /**
+ * Lists (as TermDtos) the NamedAreas referenced by any Distribution element,
+ * optionally expanded to include all of their transitive partOf parents, paged
+ * and ordered by idInVocabulary.
+ *
+ * Three queries: (1) distinct (area.id, area.partOf.id) pairs actually in use;
+ * (2) when includeAllParents, the full (id, parentId) map of all NamedAreas,
+ * walked upward per leaf with a visited set to stop on shared ancestors;
+ * (3) uuid/representation/parent/vocabulary rows for all collected ids, one
+ * row per representation, folded into DTOs by termDtoListFrom.
+ */
+ @SuppressWarnings("unchecked")
+ @Override
+ public List<TermDto> listNamedAreasInUse(boolean includeAllParents, Integer pageSize, Integer pageNumber) {
+
+// Logger.getLogger("org.hibernate.SQL").setLevel(Level.TRACE);
+
+ StringBuilder queryString = new StringBuilder(
+ "SELECT DISTINCT a.id, a.partOf.id"
+ + " FROM Distribution AS d JOIN d.area AS a");
+ Query query = getSession().createQuery(queryString.toString());
+
+ List<Object[]> areasInUse = query.list();
+ List<Object[]> parentResults = new ArrayList<Object[]>();
+
+ if(!areasInUse.isEmpty()) {
+ Set<Object> allAreaIds = new HashSet<Object>(areasInUse.size());
+
+ if(includeAllParents) {
+ // find all parent nodes
+ String allAreasQueryStr = "select a.id, a.partOf.id from NamedArea as a";
+ query = getSession().createQuery(allAreasQueryStr);
+ List<Object[]> allAreasResult = query.list();
+ Map<Object, Object> allAreasMap = ArrayUtils.toMap(allAreasResult.toArray());
+
+ Set<Object> parents = new HashSet<Object>();
+
+ // climb the partOf chain of every area in use
+ for(Object[] leaf : areasInUse) {
+ allAreaIds.add(leaf[0]);
+ Object parentId = leaf[1];
+ while (parentId != null) {
+ if(parents.contains(parentId)) {
+ // break if the parent already is in the set
+ break;
+ }
+ parents.add(parentId);
+ parentId = allAreasMap.get(parentId);
+ }
+ }
+ allAreaIds.addAll(parents);
+ } else {
+ // only add the ids found so far
+ for(Object[] leaf : areasInUse) {
+ allAreaIds.add(leaf[0]);
+ }
+ }
+
+
+ // NOTE can't use "select new TermDto(distinct a.uuid, r , a.vocabulary.uuid) since we will get multiple
+ // rows for a term with multiple representations
+ String parentAreasQueryStr = "select a.uuid, r, p.uuid, v.uuid "
+ + "from NamedArea as a LEFT JOIN a.partOf as p LEFT JOIN a.representations AS r LEFT JOIN a.vocabulary as v "
+ + "where a.id in (:allAreaIds) order by a.idInVocabulary";
+ query = getSession().createQuery(parentAreasQueryStr);
+ query.setParameterList("allAreaIds", allAreaIds);
+ if(pageSize != null) {
+ query.setMaxResults(pageSize);
+ if(pageNumber != null) {
+ query.setFirstResult(pageNumber * pageSize);
+ }
+ }
+ parentResults = query.list();
+ }
+ List<TermDto> dtoList = termDtoListFrom(parentResults);
+
+ return dtoList;
+ }
+
+ /**
+ * Folds raw query rows of shape [uuid, representation(s), parentUuid,
+ * vocabularyUuid] into one TermDto per uuid, merging the representations of
+ * rows that share a uuid.
+ *
+ * @param results rows as produced by the query in listNamedAreasInUse
+ * @return one DTO per distinct term uuid
+ */
+ private List<TermDto> termDtoListFrom(List<Object[]> results) {
+ Map<UUID, TermDto> dtoMap = new HashMap<UUID, TermDto>(results.size());
+ for (Object[] elements : results) {
+ UUID uuid = (UUID)elements[0];
+ if(dtoMap.containsKey(uuid)){
+ // repeated uuid: additional representation for an existing DTO
+ dtoMap.get(uuid).addRepresentation((Representation)elements[1]);
+ } else {
+ Set<Representation> representations;
+ if(elements[1] instanceof Representation) {
+ representations = new HashSet<Representation>(1);
+ representations.add((Representation)elements[1]);
+ } else {
+ // NOTE(review): unchecked cast — assumes the column can also arrive
+ // as a Set<Representation>; confirm against the producing query
+ representations = (Set<Representation>)elements[1];
+ }
+ dtoMap.put(uuid, new TermDto(uuid, representations, (UUID)elements[2], (UUID)elements[3]));
+ }
+ }
+ return new ArrayList<TermDto>(dtoMap.values());
+ }
+
+
+
+}
-/**\r
-* Copyright (C) 2008 EDIT\r
-* European Distributed Institute of Taxonomy \r
-* http://www.e-taxonomy.eu\r
-*/\r
-\r
-package eu.etaxonomy.cdm.persistence.dao.hibernate.media;\r
-\r
-import java.util.HashSet;\r
-import java.util.List;\r
-import java.util.Set;\r
-\r
-import org.hibernate.Criteria;\r
-import org.hibernate.Hibernate;\r
-import org.hibernate.Query;\r
-import org.hibernate.criterion.Projections;\r
-import org.hibernate.criterion.Restrictions;\r
-import org.hibernate.envers.query.AuditEntity;\r
-import org.hibernate.envers.query.AuditQuery;\r
-import org.hibernate.search.FullTextSession;\r
-import org.hibernate.search.Search;\r
-import org.springframework.stereotype.Repository;\r
-\r
-import eu.etaxonomy.cdm.model.description.MediaKey;\r
-import eu.etaxonomy.cdm.model.location.NamedArea;\r
-import eu.etaxonomy.cdm.model.media.Media;\r
-import eu.etaxonomy.cdm.model.media.Rights;\r
-import eu.etaxonomy.cdm.model.molecular.PhylogeneticTree;\r
-import eu.etaxonomy.cdm.model.taxon.Taxon;\r
-import eu.etaxonomy.cdm.model.view.AuditEvent;\r
-import eu.etaxonomy.cdm.persistence.dao.common.OperationNotSupportedInPriorViewException;\r
-import eu.etaxonomy.cdm.persistence.dao.hibernate.common.IdentifiableDaoBase;\r
-import eu.etaxonomy.cdm.persistence.dao.media.IMediaDao;\r
-\r
-/**\r
- * @author a.babadshanjan\r
- * @created 08.09.2008\r
- */\r
-@Repository\r
-public class MediaDaoHibernateImpl extends IdentifiableDaoBase<Media> implements IMediaDao {\r
-\r
- protected String getDefaultField() {\r
- return "title.text";\r
- }\r
- \r
- public MediaDaoHibernateImpl() {\r
- super(Media.class);\r
- indexedClasses = new Class[3];\r
- indexedClasses[0] = Media.class;\r
- indexedClasses[1] = MediaKey.class;\r
- indexedClasses[2] = PhylogeneticTree.class;\r
- }\r
-\r
- public int countMediaKeys(Set<Taxon> taxonomicScope, Set<NamedArea> geoScopes) {\r
- AuditEvent auditEvent = getAuditEventFromContext();\r
- if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {\r
- Criteria criteria = getSession().createCriteria(MediaKey.class);\r
- \r
- if(taxonomicScope != null && !taxonomicScope.isEmpty()) {\r
- Set<Integer> taxonomicScopeIds = new HashSet<Integer>();\r
- for(Taxon n : taxonomicScope) {\r
- taxonomicScopeIds.add(n.getId());\r
- }\r
- criteria.createCriteria("taxonomicScope").add(Restrictions.in("id", taxonomicScopeIds));\r
- }\r
- \r
- if(geoScopes != null && !geoScopes.isEmpty()) {\r
- Set<Integer> geoScopeIds = new HashSet<Integer>();\r
- for(NamedArea n : geoScopes) {\r
- geoScopeIds.add(n.getId());\r
- }\r
- criteria.createCriteria("geographicalScope").add(Restrictions.in("id", geoScopeIds));\r
- }\r
- \r
- criteria.setProjection(Projections.countDistinct("id"));\r
- \r
- return ((Number)criteria.uniqueResult()).intValue();\r
- } else {\r
- if((taxonomicScope == null || taxonomicScope.isEmpty()) && (geoScopes == null || geoScopes.isEmpty())) {\r
- AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(MediaKey.class,auditEvent.getRevisionNumber());\r
- query.addProjection(AuditEntity.id().count("id"));\r
- return ((Long)query.getSingleResult()).intValue();\r
- } else {\r
- throw new OperationNotSupportedInPriorViewException("countMediaKeys(Set<Taxon> taxonomicScope, Set<NamedArea> geoScopes)");\r
- }\r
- }\r
- }\r
-\r
- public List<MediaKey> getMediaKeys(Set<Taxon> taxonomicScope, Set<NamedArea> geoScopes, Integer pageSize, Integer pageNumber, List<String> propertyPaths) {\r
- AuditEvent auditEvent = getAuditEventFromContext();\r
- if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {\r
- Criteria inner = getSession().createCriteria(MediaKey.class);\r
-\r
- if(taxonomicScope != null && !taxonomicScope.isEmpty()) {\r
- Set<Integer> taxonomicScopeIds = new HashSet<Integer>();\r
- for(Taxon n : taxonomicScope) {\r
- taxonomicScopeIds.add(n.getId());\r
- }\r
- inner.createCriteria("taxonomicScope").add(Restrictions.in("id", taxonomicScopeIds));\r
- }\r
-\r
- if(geoScopes != null && !geoScopes.isEmpty()) {\r
- Set<Integer> geoScopeIds = new HashSet<Integer>();\r
- for(NamedArea n : geoScopes) {\r
- geoScopeIds.add(n.getId());\r
- }\r
- inner.createCriteria("geographicalScope").add(Restrictions.in("id", geoScopeIds));\r
- }\r
-\r
- inner.setProjection(Projections.distinct(Projections.id()));\r
-\r
- Criteria criteria = getSession().createCriteria(MediaKey.class);\r
- criteria.add(Restrictions.in("id", (List<Integer>)inner.list()));\r
-\r
- if(pageSize != null) {\r
- criteria.setMaxResults(pageSize);\r
- if(pageNumber != null) {\r
- criteria.setFirstResult(pageNumber * pageSize);\r
- }\r
- }\r
-\r
- List<MediaKey> results = (List<MediaKey>)criteria.list();\r
-\r
- defaultBeanInitializer.initializeAll(results, propertyPaths);\r
-\r
- return results;\r
- } else {\r
- if((taxonomicScope == null || taxonomicScope.isEmpty()) && (geoScopes == null || geoScopes.isEmpty())) {\r
- AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(MediaKey.class,auditEvent.getRevisionNumber());\r
- \r
- if(pageSize != null) {\r
- query.setMaxResults(pageSize);\r
- if(pageNumber != null) {\r
- query.setFirstResult(pageNumber * pageSize);\r
- } else {\r
- query.setFirstResult(0);\r
- }\r
- }\r
- List<MediaKey> results = (List<MediaKey>)query.getResultList();\r
- defaultBeanInitializer.initializeAll(results, propertyPaths);\r
- return results;\r
- } else {\r
- throw new OperationNotSupportedInPriorViewException("getMediaKeys(Set<Taxon> taxonomicScope, Set<NamedArea> geoScopes, Integer pageSize, Integer pageNumber, List<String> propertyPaths)");\r
- }\r
- }\r
- }\r
- \r
- public List<Rights> getRights(Media media, Integer pageSize, Integer pageNumber, List<String> propertyPaths) {\r
- checkNotInPriorView("MediaDaoHibernateImpl.getRights(Media t, Integer pageSize, Integer pageNumber, List<String> propertyPaths)");\r
- Query query = getSession().createQuery("select rights from Media media join media.rights rights where media = :media");\r
- query.setParameter("media",media);\r
- setPagingParameter(query, pageSize, pageNumber);\r
- List<Rights> results = (List<Rights>)query.list();\r
- defaultBeanInitializer.initializeAll(results, propertyPaths);\r
- return results;\r
- }\r
- \r
- public int countRights(Media media) {\r
- checkNotInPriorView("MediaDaoHibernateImpl.countRights(Media t)");\r
- Query query = getSession().createQuery("select count(rights) from Media media join media.rights rights where media = :media");\r
- query.setParameter("media",media);\r
- return ((Long)query.uniqueResult()).intValue();\r
- }\r
-\r
- \r
-\r
- @Override\r
- public void rebuildIndex() {\r
- FullTextSession fullTextSession = Search.getFullTextSession(getSession());\r
- \r
- for(Media media : list(null,null)) { // re-index all media\r
- Hibernate.initialize(media.getTitle());\r
- Hibernate.initialize(media.getAllDescriptions());\r
- Hibernate.initialize(media.getArtist());\r
- fullTextSession.index(media);\r
- }\r
- fullTextSession.flushToIndexes();\r
- }\r
- \r
-}\r
+/**
+* Copyright (C) 2008 EDIT
+* European Distributed Institute of Taxonomy
+* http://www.e-taxonomy.eu
+*/
+
+package eu.etaxonomy.cdm.persistence.dao.hibernate.media;
+
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.hibernate.Criteria;
+import org.hibernate.Hibernate;
+import org.hibernate.Query;
+import org.hibernate.criterion.Projections;
+import org.hibernate.criterion.Restrictions;
+import org.hibernate.envers.query.AuditEntity;
+import org.hibernate.envers.query.AuditQuery;
+import org.hibernate.search.FullTextSession;
+import org.hibernate.search.Search;
+import org.springframework.stereotype.Repository;
+
+import eu.etaxonomy.cdm.model.description.MediaKey;
+import eu.etaxonomy.cdm.model.location.NamedArea;
+import eu.etaxonomy.cdm.model.media.Media;
+import eu.etaxonomy.cdm.model.media.Rights;
+import eu.etaxonomy.cdm.model.molecular.PhylogeneticTree;
+import eu.etaxonomy.cdm.model.taxon.Taxon;
+import eu.etaxonomy.cdm.model.view.AuditEvent;
+import eu.etaxonomy.cdm.persistence.dao.common.OperationNotSupportedInPriorViewException;
+import eu.etaxonomy.cdm.persistence.dao.hibernate.common.IdentifiableDaoBase;
+import eu.etaxonomy.cdm.persistence.dao.media.IMediaDao;
+
+/**
+ * @author a.babadshanjan
+ * @created 08.09.2008
+ */
+@Repository
+public class MediaDaoHibernateImpl extends IdentifiableDaoBase<Media> implements IMediaDao {
+
+ /**
+ * Default full-text search field for Media: the text of its title.
+ */
+ protected String getDefaultField() {
+ return "title.text";
+ }
+
+ /**
+ * Registers Media plus the Media subtypes that carry their own full-text
+ * indexes (MediaKey, PhylogeneticTree) for index rebuilding.
+ */
+ public MediaDaoHibernateImpl() {
+ super(Media.class);
+ indexedClasses = new Class[3];
+ indexedClasses[0] = Media.class;
+ indexedClasses[1] = MediaKey.class;
+ indexedClasses[2] = PhylogeneticTree.class;
+ }
+
+ /**
+ * Counts distinct MediaKeys, optionally restricted to given taxonomic and/or
+ * geographic scopes. In a prior (audited) view only the unfiltered count is
+ * supported; any scope filter there raises
+ * OperationNotSupportedInPriorViewException.
+ */
+ @Override
+ public int countMediaKeys(Set<Taxon> taxonomicScope, Set<NamedArea> geoScopes) {
+ AuditEvent auditEvent = getAuditEventFromContext();
+ if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {
+ Criteria criteria = getSession().createCriteria(MediaKey.class);
+
+ if(taxonomicScope != null && !taxonomicScope.isEmpty()) {
+ Set<Integer> taxonomicScopeIds = new HashSet<Integer>();
+ for(Taxon n : taxonomicScope) {
+ taxonomicScopeIds.add(n.getId());
+ }
+ criteria.createCriteria("taxonomicScope").add(Restrictions.in("id", taxonomicScopeIds));
+ }
+
+ if(geoScopes != null && !geoScopes.isEmpty()) {
+ Set<Integer> geoScopeIds = new HashSet<Integer>();
+ for(NamedArea n : geoScopes) {
+ geoScopeIds.add(n.getId());
+ }
+ criteria.createCriteria("geographicalScope").add(Restrictions.in("id", geoScopeIds));
+ }
+
+ // distinct, since the scope joins can multiply rows per key
+ criteria.setProjection(Projections.countDistinct("id"));
+
+ return ((Number)criteria.uniqueResult()).intValue();
+ } else {
+ if((taxonomicScope == null || taxonomicScope.isEmpty()) && (geoScopes == null || geoScopes.isEmpty())) {
+ AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(MediaKey.class,auditEvent.getRevisionNumber());
+ query.addProjection(AuditEntity.id().countDistinct());
+ return ((Long)query.getSingleResult()).intValue();
+ } else {
+ throw new OperationNotSupportedInPriorViewException("countMediaKeys(Set<Taxon> taxonomicScope, Set<NamedArea> geoScopes)");
+ }
+ }
+ }
+
+ @Override
+ public List<MediaKey> getMediaKeys(Set<Taxon> taxonomicScope, Set<NamedArea> geoScopes, Integer pageSize, Integer pageNumber, List<String> propertyPaths) {
+ AuditEvent auditEvent = getAuditEventFromContext();
+ if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {
+ Criteria inner = getSession().createCriteria(MediaKey.class);
+
+ if(taxonomicScope != null && !taxonomicScope.isEmpty()) {
+ Set<Integer> taxonomicScopeIds = new HashSet<Integer>();
+ for(Taxon n : taxonomicScope) {
+ taxonomicScopeIds.add(n.getId());
+ }
+ inner.createCriteria("taxonomicScope").add(Restrictions.in("id", taxonomicScopeIds));
+ }
+
+ if(geoScopes != null && !geoScopes.isEmpty()) {
+ Set<Integer> geoScopeIds = new HashSet<Integer>();
+ for(NamedArea n : geoScopes) {
+ geoScopeIds.add(n.getId());
+ }
+ inner.createCriteria("geographicalScope").add(Restrictions.in("id", geoScopeIds));
+ }
+
+ inner.setProjection(Projections.distinct(Projections.id()));
+
+ Criteria criteria = getSession().createCriteria(MediaKey.class);
+ criteria.add(Restrictions.in("id", inner.list()));
+
+ if(pageSize != null) {
+ criteria.setMaxResults(pageSize);
+ if(pageNumber != null) {
+ criteria.setFirstResult(pageNumber * pageSize);
+ }
+ }
+
+ List<MediaKey> results = criteria.list();
+
+ defaultBeanInitializer.initializeAll(results, propertyPaths);
+
+ return results;
+ } else {
+ if((taxonomicScope == null || taxonomicScope.isEmpty()) && (geoScopes == null || geoScopes.isEmpty())) {
+ AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(MediaKey.class,auditEvent.getRevisionNumber());
+
+ if(pageSize != null) {
+ query.setMaxResults(pageSize);
+ if(pageNumber != null) {
+ query.setFirstResult(pageNumber * pageSize);
+ } else {
+ query.setFirstResult(0);
+ }
+ }
+ List<MediaKey> results = query.getResultList();
+ defaultBeanInitializer.initializeAll(results, propertyPaths);
+ return results;
+ } else {
+ throw new OperationNotSupportedInPriorViewException("getMediaKeys(Set<Taxon> taxonomicScope, Set<NamedArea> geoScopes, Integer pageSize, Integer pageNumber, List<String> propertyPaths)");
+ }
+ }
+ }
+
+ @Override
+ public List<Rights> getRights(Media media, Integer pageSize, Integer pageNumber, List<String> propertyPaths) {
+ checkNotInPriorView("MediaDaoHibernateImpl.getRights(Media t, Integer pageSize, Integer pageNumber, List<String> propertyPaths)");
+ Query query = getSession().createQuery("select rights from Media media join media.rights rights where media = :media");
+ query.setParameter("media",media);
+ setPagingParameter(query, pageSize, pageNumber);
+ List<Rights> results = query.list();
+ defaultBeanInitializer.initializeAll(results, propertyPaths);
+ return results;
+ }
+
+ @Override
+ public int countRights(Media media) {
+ checkNotInPriorView("MediaDaoHibernateImpl.countRights(Media t)");
+ Query query = getSession().createQuery("select count(rights) from Media media join media.rights rights where media = :media");
+ query.setParameter("media",media);
+ return ((Long)query.uniqueResult()).intValue();
+ }
+
+
+
+ @Override
+ public void rebuildIndex() {
+ FullTextSession fullTextSession = Search.getFullTextSession(getSession());
+
+ for(Media media : list(null,null)) { // re-index all media
+ Hibernate.initialize(media.getTitle());
+ Hibernate.initialize(media.getAllDescriptions());
+ Hibernate.initialize(media.getArtist());
+ fullTextSession.index(media);
+ }
+ fullTextSession.flushToIndexes();
+ }
+
+}
-/**\r
- * Copyright (C) 2007 EDIT\r
- * European Distributed Institute of Taxonomy\r
- * http://www.e-taxonomy.eu\r
- *\r
- * The contents of this file are subject to the Mozilla Public License Version 1.1\r
- * See LICENSE.TXT at the top of this package for the full license terms.\r
- *\r
- */\r
-package eu.etaxonomy.cdm.persistence.dao.hibernate.name;\r
-\r
-import java.util.ArrayList;\r
-import java.util.Collections;\r
-import java.util.HashMap;\r
-import java.util.Iterator;\r
-import java.util.List;\r
-import java.util.Set;\r
-import java.util.UUID;\r
-\r
-import org.apache.log4j.Logger;\r
-import org.hibernate.Criteria;\r
-import org.hibernate.Query;\r
-import org.hibernate.SQLQuery;\r
-import org.hibernate.criterion.Criterion;\r
-import org.hibernate.criterion.Order;\r
-import org.hibernate.criterion.Projections;\r
-import org.hibernate.criterion.Restrictions;\r
-import org.hibernate.envers.query.AuditEntity;\r
-import org.hibernate.envers.query.AuditQuery;\r
-import org.springframework.beans.factory.annotation.Autowired;\r
-import org.springframework.beans.factory.annotation.Qualifier;\r
-import org.springframework.stereotype.Repository;\r
-\r
-\r
-import eu.etaxonomy.cdm.model.common.CdmBase;\r
-import eu.etaxonomy.cdm.model.common.RelationshipBase;\r
-import eu.etaxonomy.cdm.model.name.BacterialName;\r
-import eu.etaxonomy.cdm.model.name.BotanicalName;\r
-import eu.etaxonomy.cdm.model.name.CultivarPlantName;\r
-import eu.etaxonomy.cdm.model.name.HomotypicalGroup;\r
-import eu.etaxonomy.cdm.model.name.HybridRelationship;\r
-import eu.etaxonomy.cdm.model.name.HybridRelationshipType;\r
-import eu.etaxonomy.cdm.model.name.NameRelationship;\r
-import eu.etaxonomy.cdm.model.name.NameRelationshipType;\r
-import eu.etaxonomy.cdm.model.name.NonViralName;\r
-import eu.etaxonomy.cdm.model.name.Rank;\r
-import eu.etaxonomy.cdm.model.name.SpecimenTypeDesignationStatus;\r
-import eu.etaxonomy.cdm.model.name.TaxonNameBase;\r
-import eu.etaxonomy.cdm.model.name.TypeDesignationBase;\r
-import eu.etaxonomy.cdm.model.name.TypeDesignationStatusBase;\r
-import eu.etaxonomy.cdm.model.name.ViralName;\r
-import eu.etaxonomy.cdm.model.name.ZoologicalName;\r
-import eu.etaxonomy.cdm.model.taxon.TaxonBase;\r
-import eu.etaxonomy.cdm.model.view.AuditEvent;\r
-import eu.etaxonomy.cdm.persistence.dao.hibernate.common.IdentifiableDaoBase;\r
-import eu.etaxonomy.cdm.persistence.dao.name.IHomotypicalGroupDao;\r
-import eu.etaxonomy.cdm.persistence.dao.name.ITaxonNameDao;\r
-import eu.etaxonomy.cdm.persistence.dao.taxon.ITaxonDao;\r
-import eu.etaxonomy.cdm.persistence.dto.UuidAndTitleCache;\r
-import eu.etaxonomy.cdm.persistence.query.MatchMode;\r
-import eu.etaxonomy.cdm.persistence.query.OrderHint;\r
-\r
-/**\r
- * @author a.mueller\r
- *\r
- */\r
-@Repository\r
-@Qualifier("taxonNameDaoHibernateImpl")\r
-public class TaxonNameDaoHibernateImpl extends IdentifiableDaoBase<TaxonNameBase> implements ITaxonNameDao {\r
-\r
- private static final Logger logger = Logger.getLogger(TaxonNameDaoHibernateImpl.class);\r
-\r
- @Autowired\r
- private ITaxonDao taxonDao;\r
-\r
- @Autowired\r
- \r
- private IHomotypicalGroupDao homotypicalGroupDao;\r
-\r
- public TaxonNameDaoHibernateImpl() {\r
- super(TaxonNameBase.class);\r
- indexedClasses = new Class[6];\r
- indexedClasses[0] = BacterialName.class;\r
- indexedClasses[1] = BotanicalName.class;\r
- indexedClasses[2] = CultivarPlantName.class;\r
- indexedClasses[3] = NonViralName.class;\r
- indexedClasses[4] = ViralName.class;\r
- indexedClasses[5] = ZoologicalName.class;\r
- }\r
-\r
- @Override\r
- public int countHybridNames(NonViralName name, HybridRelationshipType type) {\r
- AuditEvent auditEvent = getAuditEventFromContext();\r
- if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {\r
- Query query = null;\r
- if(type == null) {\r
- query = getSession().createQuery("select count(relation) from HybridRelationship relation where relation.relatedFrom = :name");\r
- } else {\r
- query = getSession().createQuery("select count(relation) from HybridRelationship relation where relation.relatedFrom = :name and relation.type = :type");\r
- query.setParameter("type", type);\r
- }\r
- query.setParameter("name",name);\r
- return ((Long)query.uniqueResult()).intValue();\r
- } else {\r
- AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(HybridRelationship.class,auditEvent.getRevisionNumber());\r
- query.add(AuditEntity.relatedId("relatedFrom").eq(name.getId()));\r
- query.addProjection(AuditEntity.id().count("id"));\r
-\r
- if(type != null) {\r
- query.add(AuditEntity.relatedId("type").eq(type.getId()));\r
- }\r
-\r
- return ((Long)query.getSingleResult()).intValue();\r
- }\r
- }\r
-\r
- @Override\r
- public int countNames(String queryString) {\r
- checkNotInPriorView("TaxonNameDaoHibernateImpl.countNames(String queryString)");\r
- Criteria criteria = getSession().createCriteria(TaxonNameBase.class);\r
-\r
- if (queryString != null) {\r
- criteria.add(Restrictions.ilike("nameCache", queryString));\r
- }\r
- criteria.setProjection(Projections.projectionList().add(Projections.rowCount()));\r
-\r
- return ((Number)criteria.uniqueResult()).intValue();\r
- }\r
-\r
- @Override\r
- public int countNames(String queryString, MatchMode matchMode, List<Criterion> criteria) {\r
-\r
- Criteria crit = getSession().createCriteria(type);\r
- if (matchMode == MatchMode.EXACT) {\r
- crit.add(Restrictions.eq("nameCache", matchMode.queryStringFrom(queryString)));\r
- } else {\r
- crit.add(Restrictions.ilike("nameCache", matchMode.queryStringFrom(queryString)));\r
- }\r
- if(criteria != null) {\r
- for (Criterion criterion : criteria) {\r
- crit.add(criterion);\r
- }\r
- }\r
-\r
- crit.setProjection(Projections.projectionList().add(Projections.rowCount()));\r
- return ((Number)crit.uniqueResult()).intValue();\r
- }\r
-\r
- @Override\r
- public int countNames(String genusOrUninomial, String infraGenericEpithet, String specificEpithet, String infraSpecificEpithet, Rank rank) {\r
- AuditEvent auditEvent = getAuditEventFromContext();\r
- if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {\r
- Criteria criteria = getSession().createCriteria(TaxonNameBase.class);\r
-\r
- /**\r
- * Given HHH-2951 - "Restrictions.eq when passed null, should create a NullRestriction"\r
- * We need to convert nulls to NullRestrictions for now\r
- */\r
- if(genusOrUninomial != null) {\r
- criteria.add(Restrictions.eq("genusOrUninomial",genusOrUninomial));\r
- } else {\r
- criteria.add(Restrictions.isNull("genusOrUninomial"));\r
- }\r
-\r
- if(infraGenericEpithet != null) {\r
- criteria.add(Restrictions.eq("infraGenericEpithet", infraGenericEpithet));\r
- } else {\r
- criteria.add(Restrictions.isNull("infraGenericEpithet"));\r
- }\r
-\r
- if(specificEpithet != null) {\r
- criteria.add(Restrictions.eq("specificEpithet", specificEpithet));\r
- } else {\r
- criteria.add(Restrictions.isNull("specificEpithet"));\r
- }\r
-\r
- if(infraSpecificEpithet != null) {\r
- criteria.add(Restrictions.eq("infraSpecificEpithet",infraSpecificEpithet));\r
- } else {\r
- criteria.add(Restrictions.isNull("infraSpecificEpithet"));\r
- }\r
-\r
- if(rank != null) {\r
- criteria.add(Restrictions.eq("rank", rank));\r
- }\r
-\r
- criteria.setProjection(Projections.rowCount());\r
- return ((Number)criteria.uniqueResult()).intValue();\r
- } else {\r
- AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(TaxonNameBase.class,auditEvent.getRevisionNumber());\r
-\r
- if(genusOrUninomial != null) {\r
- query.add(AuditEntity.property("genusOrUninomial").eq(genusOrUninomial));\r
- } else {\r
- query.add(AuditEntity.property("genusOrUninomial").isNull());\r
- }\r
-\r
- if(infraGenericEpithet != null) {\r
- query.add(AuditEntity.property("infraGenericEpithet").eq(infraGenericEpithet));\r
- } else {\r
- query.add(AuditEntity.property("infraGenericEpithet").isNull());\r
- }\r
-\r
- if(specificEpithet != null) {\r
- query.add(AuditEntity.property("specificEpithet").eq(specificEpithet));\r
- } else {\r
- query.add(AuditEntity.property("specificEpithet").isNull());\r
- }\r
-\r
- if(infraSpecificEpithet != null) {\r
- query.add(AuditEntity.property("infraSpecificEpithet").eq(infraSpecificEpithet));\r
- } else {\r
- query.add(AuditEntity.property("infraSpecificEpithet").isNull());\r
- }\r
-\r
- if(rank != null) {\r
- query.add(AuditEntity.relatedId("rank").eq(rank.getId()));\r
- }\r
-\r
- query.addProjection(AuditEntity.id().count("id"));\r
- return ((Long)query.getSingleResult()).intValue();\r
- }\r
- }\r
-\r
- @Override\r
- public int countNameRelationships(TaxonNameBase name, NameRelationship.Direction direction, NameRelationshipType type) {\r
-\r
- AuditEvent auditEvent = getAuditEventFromContext();\r
- if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {\r
- Query query = null;\r
- if(type == null) {\r
- query = getSession().createQuery("select count(relation) from NameRelationship relation where relation." + direction +" = :name");\r
- } else {\r
- query = getSession().createQuery("select count(relation) from NameRelationship relation where relation." + direction +" = :name and relation.type = :type");\r
- query.setParameter("type", type);\r
- }\r
- query.setParameter("name",name);\r
- return ((Long)query.uniqueResult()).intValue();\r
- } else {\r
- AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(NameRelationship.class,auditEvent.getRevisionNumber());\r
- query.add(AuditEntity.relatedId(direction.toString()).eq(name.getId()));\r
- query.addProjection(AuditEntity.id().count("id"));\r
-\r
- if(type != null) {\r
- query.add(AuditEntity.relatedId("type").eq(type.getId()));\r
- }\r
-\r
- return ((Long)query.getSingleResult()).intValue();\r
- }\r
- }\r
-\r
-\r
- @Override\r
- public int countTypeDesignations(TaxonNameBase name, SpecimenTypeDesignationStatus status) {\r
- checkNotInPriorView("countTypeDesignations(TaxonNameBase name, SpecimenTypeDesignationStatus status)");\r
- Query query = null;\r
- if(status == null) {\r
- query = getSession().createQuery("select count(designation) from TypeDesignationBase designation join designation.typifiedNames name where name = :name");\r
- } else {\r
- query = getSession().createQuery("select count(designation) from TypeDesignationBase designation join designation.typifiedNames name where name = :name and designation.typeStatus = :status");\r
- query.setParameter("status", status);\r
- }\r
- query.setParameter("name",name);\r
- return ((Long)query.uniqueResult()).intValue();\r
- }\r
-\r
- @Override\r
- public List<HybridRelationship> getHybridNames(NonViralName name, HybridRelationshipType type, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {\r
- AuditEvent auditEvent = getAuditEventFromContext();\r
- if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {\r
- Criteria criteria = getSession().createCriteria(HybridRelationship.class);\r
- criteria.add(Restrictions.eq("relatedFrom", name));\r
- if(type != null) {\r
- criteria.add(Restrictions.eq("type", type));\r
- }\r
-\r
- if(pageSize != null) {\r
- criteria.setMaxResults(pageSize);\r
- if(pageNumber != null) {\r
- criteria.setFirstResult(pageNumber * pageSize);\r
- } else {\r
- criteria.setFirstResult(0);\r
- }\r
- }\r
-\r
- addOrder(criteria, orderHints);\r
-\r
- List<HybridRelationship> results = criteria.list();\r
- defaultBeanInitializer.initializeAll(results, propertyPaths);\r
- return results;\r
- } else {\r
- AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(HybridRelationship.class,auditEvent.getRevisionNumber());\r
- query.add(AuditEntity.relatedId("relatedFrom").eq(name.getId()));\r
-\r
- if(type != null) {\r
- query.add(AuditEntity.relatedId("type").eq(type.getId()));\r
- }\r
-\r
- if(pageSize != null) {\r
- query.setMaxResults(pageSize);\r
- if(pageNumber != null) {\r
- query.setFirstResult(pageNumber * pageSize);\r
- } else {\r
- query.setFirstResult(0);\r
- }\r
- }\r
-\r
- List<HybridRelationship> results = query.getResultList();\r
- defaultBeanInitializer.initializeAll(results, propertyPaths);\r
- return results;\r
- }\r
- }\r
-\r
- @Override\r
- public List<NameRelationship> getNameRelationships(TaxonNameBase name, NameRelationship.Direction direction,\r
- NameRelationshipType type, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints,\r
- List<String> propertyPaths) {\r
-\r
- AuditEvent auditEvent = getAuditEventFromContext();\r
- if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {\r
- Criteria criteria = getSession().createCriteria(NameRelationship.class);\r
- if (name != null || direction != null){\r
- criteria.add(Restrictions.eq(direction.toString(), name));\r
- }\r
- if(type != null) {\r
- criteria.add(Restrictions.eq("type", type));\r
- }\r
-\r
- if(pageSize != null) {\r
- criteria.setMaxResults(pageSize);\r
- if(pageNumber != null) {\r
- criteria.setFirstResult(pageNumber * pageSize);\r
- } else {\r
- criteria.setFirstResult(0);\r
- }\r
- }\r
- addOrder(criteria, orderHints);\r
-\r
- List<NameRelationship> results = criteria.list();\r
- defaultBeanInitializer.initializeAll(results, propertyPaths);\r
- return results;\r
- } else {\r
- AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(NameRelationship.class,auditEvent.getRevisionNumber());\r
- query.add(AuditEntity.relatedId(direction.toString()).eq(name.getId()));\r
-\r
- if(type != null) {\r
- query.add(AuditEntity.relatedId("type").eq(type.getId()));\r
- }\r
-\r
- if(pageSize != null) {\r
- query.setMaxResults(pageSize);\r
- if(pageNumber != null) {\r
- query.setFirstResult(pageNumber * pageSize);\r
- } else {\r
- query.setFirstResult(0);\r
- }\r
- }\r
-\r
- List<NameRelationship> results = query.getResultList();\r
- defaultBeanInitializer.initializeAll(results, propertyPaths);\r
- return results;\r
- }\r
- }\r
-\r
- @Override\r
- public List<TypeDesignationBase> getTypeDesignations(TaxonNameBase name, TypeDesignationStatusBase status, Integer pageSize, Integer pageNumber, List<String> propertyPaths){\r
- return getTypeDesignations(name, null, status, pageSize, pageNumber, propertyPaths);\r
- }\r
-\r
- @Override\r
- public <T extends TypeDesignationBase> List<T> getTypeDesignations(TaxonNameBase name,\r
- Class<T> type,\r
- TypeDesignationStatusBase status, Integer pageSize, Integer pageNumber,\r
- List<String> propertyPaths){\r
- checkNotInPriorView("getTypeDesignations(TaxonNameBase name,TypeDesignationStatusBase status, Integer pageSize, Integer pageNumber, List<String> propertyPaths)");\r
- Query query = null;\r
- String queryString = "select designation from TypeDesignationBase designation join designation.typifiedNames name where name = :name";\r
-\r
- if(status != null) {\r
- queryString += " and designation.typeStatus = :status";\r
- }\r
- if(type != null){\r
- queryString += " and designation.class = :type";\r
- }\r
-\r
- query = getSession().createQuery(queryString);\r
-\r
- if(status != null) {\r
- query.setParameter("status", status);\r
- }\r
- if(type != null){\r
- query.setParameter("type", type.getSimpleName());\r
- }\r
-\r
- query.setParameter("name",name);\r
-\r
- if(pageSize != null) {\r
- query.setMaxResults(pageSize);\r
- if(pageNumber != null) {\r
- query.setFirstResult(pageNumber * pageSize);\r
- } else {\r
- query.setFirstResult(0);\r
- }\r
- }\r
- return defaultBeanInitializer.initializeAll((List<T>)query.list(), propertyPaths);\r
- }\r
-\r
-\r
- public List<TaxonNameBase<?,?>> searchNames(String queryString, MatchMode matchMode, Integer pageSize, Integer pageNumber) {\r
- checkNotInPriorView("TaxonNameDaoHibernateImpl.searchNames(String queryString, Integer pageSize, Integer pageNumber)");\r
- Criteria criteria = getSession().createCriteria(TaxonNameBase.class);\r
-\r
- if (queryString != null) {\r
- criteria.add(Restrictions.ilike("nameCache", queryString));\r
- }\r
- if(pageSize != null) {\r
- criteria.setMaxResults(pageSize);\r
- if(pageNumber != null) {\r
- criteria.setFirstResult(pageNumber * pageSize);\r
- } else {\r
- criteria.setFirstResult(0);\r
- }\r
- }\r
- List<TaxonNameBase<?,?>> results = criteria.list();\r
- return results;\r
- }\r
-\r
-\r
- @Override\r
- public List<TaxonNameBase<?,?>> searchNames(String queryString, Integer pageSize, Integer pageNumber) {\r
- return searchNames(queryString, MatchMode.BEGINNING, pageSize, pageNumber);\r
- }\r
-\r
-\r
- @Override\r
- public List<TaxonNameBase> searchNames(String genusOrUninomial,String infraGenericEpithet, String specificEpithet, String infraSpecificEpithet, Rank rank, Integer pageSize,Integer pageNumber, List<OrderHint> orderHints,\r
- List<String> propertyPaths) {\r
- AuditEvent auditEvent = getAuditEventFromContext();\r
- if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {\r
- Criteria criteria = getSession().createCriteria(TaxonNameBase.class);\r
-\r
- /**\r
- * Given HHH-2951 - "Restrictions.eq when passed null, should create a NullRestriction"\r
- * We need to convert nulls to NullRestrictions for now\r
- */\r
- if(genusOrUninomial != null) {\r
- criteria.add(Restrictions.eq("genusOrUninomial",genusOrUninomial));\r
- } else {\r
- criteria.add(Restrictions.isNull("genusOrUninomial"));\r
- }\r
-\r
- if(infraGenericEpithet != null) {\r
- criteria.add(Restrictions.eq("infraGenericEpithet", infraGenericEpithet));\r
- } else {\r
- criteria.add(Restrictions.isNull("infraGenericEpithet"));\r
- }\r
-\r
- if(specificEpithet != null) {\r
- criteria.add(Restrictions.eq("specificEpithet", specificEpithet));\r
- } else {\r
- criteria.add(Restrictions.isNull("specificEpithet"));\r
- }\r
-\r
- if(infraSpecificEpithet != null) {\r
- criteria.add(Restrictions.eq("infraSpecificEpithet",infraSpecificEpithet));\r
- } else {\r
- criteria.add(Restrictions.isNull("infraSpecificEpithet"));\r
- }\r
-\r
- if(rank != null) {\r
- criteria.add(Restrictions.eq("rank", rank));\r
- }\r
-\r
- if(pageSize != null) {\r
- criteria.setMaxResults(pageSize);\r
- if(pageNumber != null) {\r
- criteria.setFirstResult(pageNumber * pageSize);\r
- } else {\r
- criteria.setFirstResult(0);\r
- }\r
- }\r
-\r
- addOrder(criteria, orderHints);\r
-\r
- List<TaxonNameBase> results = criteria.list();\r
- defaultBeanInitializer.initializeAll(results, propertyPaths);\r
- return results;\r
- } else {\r
- AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(TaxonNameBase.class,auditEvent.getRevisionNumber());\r
-\r
- if(genusOrUninomial != null) {\r
- query.add(AuditEntity.property("genusOrUninomial").eq(genusOrUninomial));\r
- } else {\r
- query.add(AuditEntity.property("genusOrUninomial").isNull());\r
- }\r
-\r
- if(infraGenericEpithet != null) {\r
- query.add(AuditEntity.property("infraGenericEpithet").eq(infraGenericEpithet));\r
- } else {\r
- query.add(AuditEntity.property("infraGenericEpithet").isNull());\r
- }\r
-\r
- if(specificEpithet != null) {\r
- query.add(AuditEntity.property("specificEpithet").eq(specificEpithet));\r
- } else {\r
- query.add(AuditEntity.property("specificEpithet").isNull());\r
- }\r
-\r
- if(infraSpecificEpithet != null) {\r
- query.add(AuditEntity.property("infraSpecificEpithet").eq(infraSpecificEpithet));\r
- } else {\r
- query.add(AuditEntity.property("infraSpecificEpithet").isNull());\r
- }\r
-\r
- if(rank != null) {\r
- query.add(AuditEntity.relatedId("rank").eq(rank.getId()));\r
- }\r
-\r
- if(pageSize != null) {\r
- query.setMaxResults(pageSize);\r
- if(pageNumber != null) {\r
- query.setFirstResult(pageNumber * pageSize);\r
- } else {\r
- query.setFirstResult(0);\r
- }\r
- }\r
-\r
- List<TaxonNameBase> results = query.getResultList();\r
- defaultBeanInitializer.initializeAll(results, propertyPaths);\r
- return results;\r
- }\r
- }\r
-\r
- @Override\r
- public List<? extends TaxonNameBase<?,?>> findByName(String queryString,\r
- MatchMode matchmode, Integer pageSize, Integer pageNumber, List<Criterion> criteria, List<String> propertyPaths) {\r
-\r
- Criteria crit = getSession().createCriteria(type);\r
- if (matchmode == MatchMode.EXACT) {\r
- crit.add(Restrictions.eq("nameCache", matchmode.queryStringFrom(queryString)));\r
- } else {\r
- crit.add(Restrictions.ilike("nameCache", matchmode.queryStringFrom(queryString)));\r
- }\r
- if(criteria != null){\r
- for (Criterion criterion : criteria) {\r
- crit.add(criterion);\r
- }\r
- }\r
- crit.addOrder(Order.asc("nameCache"));\r
-\r
- if(pageSize != null) {\r
- crit.setMaxResults(pageSize);\r
- if(pageNumber != null) {\r
- crit.setFirstResult(pageNumber * pageSize);\r
- }\r
- }\r
-\r
- List<? extends TaxonNameBase<?,?>> results = crit.list();\r
- defaultBeanInitializer.initializeAll(results, propertyPaths);\r
-\r
- return results;\r
- }\r
-\r
- @Override\r
- public List<? extends TaxonNameBase<?,?>> findByTitle(String queryString,\r
- MatchMode matchmode, Integer pageSize, Integer pageNumber, List<Criterion> criteria, List<String> propertyPaths) {\r
-\r
- Criteria crit = getSession().createCriteria(type);\r
- if (matchmode == MatchMode.EXACT) {\r
- crit.add(Restrictions.eq("titleCache", matchmode.queryStringFrom(queryString)));\r
- } else {\r
- crit.add(Restrictions.ilike("titleCache", matchmode.queryStringFrom(queryString)));\r
- }\r
- if(criteria != null){\r
- for (Criterion criterion : criteria) {\r
- crit.add(criterion);\r
- }\r
- }\r
- crit.addOrder(Order.asc("titleCache"));\r
-\r
- if(pageSize != null) {\r
- crit.setMaxResults(pageSize);\r
- if(pageNumber != null) {\r
- crit.setFirstResult(pageNumber * pageSize);\r
- }\r
- }\r
-\r
- List<? extends TaxonNameBase<?,?>> results = crit.list();\r
- defaultBeanInitializer.initializeAll(results, propertyPaths);\r
-\r
- return results;\r
- }\r
-\r
-\r
- @Override\r
- public TaxonNameBase<?,?> findByUuid(UUID uuid, List<Criterion> criteria, List<String> propertyPaths) {\r
-\r
- Criteria crit = getSession().createCriteria(type);\r
-\r
- if (uuid != null) {\r
- crit.add(Restrictions.eq("uuid", uuid));\r
- } else {\r
- logger.warn("UUID is NULL");\r
- return null;\r
- }\r
- if(criteria != null){\r
- for (Criterion criterion : criteria) {\r
- crit.add(criterion);\r
- }\r
- }\r
- crit.addOrder(Order.asc("uuid"));\r
-\r
- List<? extends TaxonNameBase<?,?>> results = crit.list();\r
- if (results.size() == 1) {\r
- defaultBeanInitializer.initializeAll(results, propertyPaths);\r
- TaxonNameBase<?, ?> taxonName = results.iterator().next();\r
- return taxonName;\r
- } else if (results.size() > 1) {\r
- logger.error("Multiple results for UUID: " + uuid);\r
- } else if (results.size() == 0) {\r
- logger.info("No results for UUID: " + uuid);\r
- }\r
-\r
- return null;\r
- }\r
-\r
- @Override\r
- public List<RelationshipBase> getAllRelationships(Integer limit, Integer start) {\r
- AuditEvent auditEvent = getAuditEventFromContext();\r
- if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {\r
- // for some reason the HQL .class discriminator didn't work here so I created this preliminary\r
- // implementation for now. Should be cleaned in future.\r
-\r
- List<RelationshipBase> result = new ArrayList<RelationshipBase>();\r
-\r
- int nameRelSize = countAllRelationships(NameRelationship.class);\r
- if (nameRelSize > start){\r
-\r
- String hql = " FROM %s as rb ORDER BY rb.id ";\r
- hql = String.format(hql, NameRelationship.class.getSimpleName());\r
- Query query = getSession().createQuery(hql);\r
- query.setFirstResult(start);\r
- if (limit != null){\r
- query.setMaxResults(limit);\r
- }\r
- result = query.list();\r
- }\r
- limit = limit - result.size();\r
- if (limit > 0){\r
- String hql = " FROM HybridRelationship as rb ORDER BY rb.id ";\r
- hql = String.format(hql, HybridRelationship.class.getSimpleName());\r
- Query query = getSession().createQuery(hql);\r
- start = (nameRelSize > start) ? 0 : (start - nameRelSize);\r
- query.setFirstResult(start);\r
- if (limit != null){\r
- query.setMaxResults(limit);\r
- }\r
- result.addAll( query.list());\r
- }\r
- return result;\r
- } else {\r
- AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(RelationshipBase.class,auditEvent.getRevisionNumber());\r
- return query.getResultList();\r
- }\r
- }\r
-\r
-\r
- /**\r
- * TODO not yet in interface\r
- * @param clazz\r
- * @return\r
- */\r
- public int countAllRelationships(Class<? extends RelationshipBase> clazz) {\r
- if (clazz != null && ! NameRelationship.class.isAssignableFrom(clazz) && ! HybridRelationship.class.isAssignableFrom(clazz) ){\r
- throw new RuntimeException("Class must be assignable by a taxon or snonym relation");\r
- }\r
- int size = 0;\r
-\r
- if (clazz == null || NameRelationship.class.isAssignableFrom(clazz)){\r
- String hql = " SELECT count(rel) FROM NameRelationship rel";\r
- size += (Long)getSession().createQuery(hql).list().get(0);\r
- }\r
- if (clazz == null || HybridRelationship.class.isAssignableFrom(clazz)){\r
- String hql = " SELECT count(rel) FROM HybridRelationship rel";\r
- size += (Long)getSession().createQuery(hql).list().get(0);\r
- }\r
- return size;\r
- }\r
-\r
-\r
- @Override\r
- public Integer countByName(String queryString, MatchMode matchmode, List<Criterion> criteria) {\r
- //TODO improve performance\r
- List<? extends TaxonNameBase<?,?>> results = findByName(queryString, matchmode, null, null, criteria, null);\r
- return results.size();\r
-\r
- }\r
-\r
- @Override\r
- public List<UuidAndTitleCache> getUuidAndTitleCacheOfNames() {\r
- String queryString = "SELECT uuid, id, fullTitleCache FROM TaxonNameBase";\r
-\r
- @SuppressWarnings("unchecked")\r
- List<Object[]> result = getSession().createSQLQuery(queryString).list();\r
-\r
- if(result.size() == 0){\r
- return null;\r
- }else{\r
- List<UuidAndTitleCache> list = new ArrayList<UuidAndTitleCache>(result.size());\r
-\r
- for (Object object : result){\r
-\r
- Object[] objectArray = (Object[]) object;\r
-\r
- UUID uuid = UUID.fromString((String) objectArray[0]);\r
- Integer id = (Integer) objectArray[1];\r
- String titleCache = (String) objectArray[2];\r
-\r
- list.add(new UuidAndTitleCache(type, uuid, id, titleCache));\r
- }\r
-\r
- return list;\r
- }\r
- }\r
-\r
- @Override\r
- public Integer countByName(Class<? extends TaxonNameBase> clazz,String queryString, MatchMode matchmode, List<Criterion> criteria) {\r
- return super.countByParam(clazz, "nameCache", queryString, matchmode, criteria);\r
- }\r
-\r
- @Override\r
- public List<TaxonNameBase> findByName(Class<? extends TaxonNameBase> clazz, String queryString, MatchMode matchmode, List<Criterion> criteria,Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {\r
- return super.findByParam(clazz, "nameCache", queryString, matchmode, criteria, pageSize, pageNumber, orderHints, propertyPaths);\r
- }\r
-\r
- @Override\r
- public UUID delete (TaxonNameBase persistentObject){\r
- Set<TaxonBase> taxonBases = persistentObject.getTaxonBases();\r
-\r
- if (persistentObject == null){\r
- logger.warn(type.getName() + " was 'null'");\r
- return null;\r
- }\r
- getSession().saveOrUpdate(persistentObject);\r
- UUID persUuid = persistentObject.getUuid();\r
- persistentObject = this.load(persUuid);\r
- UUID homotypicalGroupUUID = persistentObject.getHomotypicalGroup().getUuid();\r
-\r
-\r
- for (TaxonBase taxonBase: taxonBases){\r
- taxonDao.delete(taxonBase);\r
- }\r
- HomotypicalGroup homotypicalGroup = homotypicalGroupDao.load(homotypicalGroupUUID);\r
- if (homotypicalGroup != null){\r
- if (homotypicalGroup.getTypifiedNames().isEmpty()){\r
- homotypicalGroupDao.delete(homotypicalGroup);\r
- }\r
- }\r
-\r
- getSession().delete(persistentObject);\r
- return persistentObject.getUuid();\r
- }\r
-\r
-\r
- @Override\r
- public ZoologicalName findZoologicalNameByUUID(UUID uuid){\r
- Criteria criteria = getSession().createCriteria(type);\r
- if (uuid != null) {\r
- criteria.add(Restrictions.eq("uuid", uuid));\r
- } else {\r
- logger.warn("UUID is NULL");\r
- return null;\r
- }\r
-\r
- List<? extends TaxonNameBase<?,?>> results = criteria.list();\r
- if (results.size() == 1) {\r
- defaultBeanInitializer.initializeAll(results, null);\r
- TaxonNameBase<?, ?> taxonName = results.iterator().next();\r
- if (taxonName.isInstanceOf(ZoologicalName.class)) {\r
- ZoologicalName zoologicalName = CdmBase.deproxy(taxonName, ZoologicalName.class);\r
- return zoologicalName;\r
- } else {\r
- logger.warn("This UUID (" + uuid + ") does not belong to a ZoologicalName. It belongs to: " + taxonName.getUuid() + " (" + taxonName.getTitleCache() + ")");\r
- }\r
- } else if (results.size() > 1) {\r
- logger.error("Multiple results for UUID: " + uuid);\r
- } else if (results.size() == 0) {\r
- logger.info("No results for UUID: " + uuid);\r
- }\r
- return null;\r
- }\r
-@Override\r
-public List<HashMap<String,String>> getNameRecords(){\r
- String sql= "SELECT"\r
- + " (SELECT famName.namecache FROM TaxonNode famNode"\r
- + " LEFT OUTER JOIN TaxonBase famTax ON famNode.taxon_id = famTax.id"\r
- + " LEFT OUTER JOIN TaxonNameBase famName ON famTax.name_id = famName.id"\r
- + " WHERE famName.rank_id = 795 AND famNode.treeIndex = SUBSTRING(tn.treeIndex, 1, length(famNode.treeIndex))"\r
- + " ) as famName, "\r
- + " (SELECT famName.namecache FROM TaxonNode famNode "\r
- + " LEFT OUTER JOIN TaxonBase famTax ON famNode.taxon_id = famTax.id "\r
- + " LEFT OUTER JOIN TaxonNameBase famName ON famTax.name_id = famName.id "\r
- + " WHERE famName.rank_id = 795 AND famNode.treeIndex = SUBSTRING(tnAcc.treeIndex, 1, length(famNode.treeIndex))"\r
- + " ) as accFamName,tb.DTYPE, tb.id as TaxonID ,tb.titleCache taxonTitle, tnb.rank_id as RankID, tnb.id as NameID,"\r
- + " tnb.namecache as name, tnb.titleCache as nameAuthor, tnb.fullTitleCache nameAndNomRef,"\r
- + " r.titleCache as nomRef, r.abbrevTitle nomRefAbbrevTitle, r.title nomRefTitle, r.datepublished_start nomRefPublishedStart, r.datepublished_end nomRefPublishedEnd, r.pages nomRefPages, inRef.abbrevTitle inRefAbbrevTitle,tnb.nomenclaturalmicroreference as detail,"\r
- + " nameType.namecache nameType, nameType.titleCache nameTypeAuthor, nameType.fullTitleCache nameTypeFullTitle, nameTypeRef.titleCache nameTypeRef, inRef.seriespart as inRefSeries, inRef.datepublished_start inRefPublishedStart, inRef.datepublished_end inRefPublishedEnd, inRef.volume as inRefVolume"\r
- + " FROM TaxonBase tb"\r
- + " LEFT OUTER JOIN TaxonNameBase tnb ON tb.name_id = tnb.id"\r
- + " LEFT OUTER JOIN Reference r ON tnb.nomenclaturalreference_id = r.id"\r
- + " LEFT OUTER JOIN TaxonNode tn ON tn.taxon_id = tb.id"\r
- + " LEFT OUTER JOIN TaxonNameBase_TypeDesignationBase typeMN ON typeMN.TaxonNameBase_id = tnb.id"\r
- + " LEFT OUTER JOIN TypeDesignationBase tdb ON tdb.id = typeMN.typedesignations_id"\r
- + " LEFT OUTER JOIN TaxonNameBase nameType ON tdb.typename_id = nameType.id"\r
- + " LEFT OUTER JOIN Reference nameTypeRef ON nameType.nomenclaturalreference_id = nameTypeRef.id"\r
- + " LEFT OUTER JOIN Reference inRef ON inRef.id = r.inreference_id"\r
- + " LEFT OUTER JOIN SynonymRelationship sr ON tb.id = sr.relatedfrom_id"\r
- + " LEFT OUTER JOIN TaxonBase accT ON accT.id = sr.relatedto_id"\r
- + " LEFT OUTER JOIN TaxonNode tnAcc ON tnAcc.taxon_id = accT.id"\r
- + " ORDER BY DTYPE, famName, accFamName, tnb.rank_id ,tb.titleCache";\r
-\r
-\r
- SQLQuery query = getSession().createSQLQuery(sql);\r
- List result = query.list();\r
- //Delimiter used in CSV file\r
-\r
-\r
- List<HashMap<String,String>> nameRecords = new ArrayList();\r
- HashMap<String,String> nameRecord = new HashMap<String,String>();\r
- for(Object object : result)\r
- {\r
- Object[] row = (Object[])object;\r
- nameRecord = new HashMap<String,String>();\r
- nameRecord.put("famName",(String)row[0]);\r
- nameRecord.put("accFamName",(String)row[1]);\r
-\r
- nameRecord.put("DTYPE",(String)row[2]);\r
- nameRecord.put("TaxonID",String.valueOf(row[3]));\r
- nameRecord.put("taxonTitle",(String)row[4]);\r
- nameRecord.put("RankID",String.valueOf(row[5]));\r
- nameRecord.put("NameID",String.valueOf(row[6]));\r
- nameRecord.put("name",(String)row[7]);\r
- nameRecord.put("nameAuthor",(String)row[8]);\r
- nameRecord.put("nameAndNomRef",(String)row[9]);\r
- nameRecord.put("nomRef",(String)row[10]);\r
- nameRecord.put("nomRefAbbrevTitle",(String)row[11]);\r
- nameRecord.put("nomRefTitle",(String)row[12]);\r
- nameRecord.put("nomRefPublishedStart",(String)row[13]);\r
- nameRecord.put("nomRefPublishedEnd",(String)row[14]);\r
- nameRecord.put("nomRefPages",(String)row[15]);\r
- nameRecord.put("inRefAbbrevTitle",(String)row[16]);\r
- nameRecord.put("detail",(String)row[17]);\r
- nameRecord.put("nameType",(String)row[18]);\r
- nameRecord.put("nameTypeAuthor",(String)row[19]);\r
- nameRecord.put("nameTypeFullTitle",(String)row[20]);\r
- nameRecord.put("nameTypeRef",(String)row[21]);\r
- nameRecord.put("inRefSeries",(String)row[22]);\r
- nameRecord.put("inRefPublishedStart",(String)row[23]);\r
- nameRecord.put("inRefPublishedEnd",(String)row[24]);\r
- nameRecord.put("inRefVolume",(String)row[25]);\r
- nameRecords.add(nameRecord);\r
- }\r
-\r
- return nameRecords;\r
-\r
-\r
-\r
- }\r
-\r
-\r
-\r
-\r
+/**
+ * Copyright (C) 2007 EDIT
+ * European Distributed Institute of Taxonomy
+ * http://www.e-taxonomy.eu
+ *
+ * The contents of this file are subject to the Mozilla Public License Version 1.1
+ * See LICENSE.TXT at the top of this package for the full license terms.
+ *
+ */
+package eu.etaxonomy.cdm.persistence.dao.hibernate.name;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Set;
+import java.util.UUID;
+
+import org.apache.log4j.Logger;
+import org.hibernate.Criteria;
+import org.hibernate.Query;
+import org.hibernate.SQLQuery;
+import org.hibernate.criterion.Criterion;
+import org.hibernate.criterion.Order;
+import org.hibernate.criterion.Projections;
+import org.hibernate.criterion.Restrictions;
+import org.hibernate.envers.query.AuditEntity;
+import org.hibernate.envers.query.AuditQuery;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.stereotype.Repository;
+
+import eu.etaxonomy.cdm.model.common.CdmBase;
+import eu.etaxonomy.cdm.model.common.RelationshipBase;
+import eu.etaxonomy.cdm.model.name.BacterialName;
+import eu.etaxonomy.cdm.model.name.BotanicalName;
+import eu.etaxonomy.cdm.model.name.CultivarPlantName;
+import eu.etaxonomy.cdm.model.name.HomotypicalGroup;
+import eu.etaxonomy.cdm.model.name.HybridRelationship;
+import eu.etaxonomy.cdm.model.name.HybridRelationshipType;
+import eu.etaxonomy.cdm.model.name.NameRelationship;
+import eu.etaxonomy.cdm.model.name.NameRelationshipType;
+import eu.etaxonomy.cdm.model.name.NonViralName;
+import eu.etaxonomy.cdm.model.name.Rank;
+import eu.etaxonomy.cdm.model.name.SpecimenTypeDesignationStatus;
+import eu.etaxonomy.cdm.model.name.TaxonNameBase;
+import eu.etaxonomy.cdm.model.name.TypeDesignationBase;
+import eu.etaxonomy.cdm.model.name.TypeDesignationStatusBase;
+import eu.etaxonomy.cdm.model.name.ViralName;
+import eu.etaxonomy.cdm.model.name.ZoologicalName;
+import eu.etaxonomy.cdm.model.taxon.TaxonBase;
+import eu.etaxonomy.cdm.model.view.AuditEvent;
+import eu.etaxonomy.cdm.persistence.dao.hibernate.common.IdentifiableDaoBase;
+import eu.etaxonomy.cdm.persistence.dao.name.IHomotypicalGroupDao;
+import eu.etaxonomy.cdm.persistence.dao.name.ITaxonNameDao;
+import eu.etaxonomy.cdm.persistence.dao.taxon.ITaxonDao;
+import eu.etaxonomy.cdm.persistence.dto.UuidAndTitleCache;
+import eu.etaxonomy.cdm.persistence.query.MatchMode;
+import eu.etaxonomy.cdm.persistence.query.OrderHint;
+
+/**
+ * @author a.mueller
+ *
+ */
+@Repository
+@Qualifier("taxonNameDaoHibernateImpl")
+public class TaxonNameDaoHibernateImpl extends IdentifiableDaoBase<TaxonNameBase> implements ITaxonNameDao {
+
+ // log4j logger for this DAO implementation.
+ private static final Logger logger = Logger.getLogger(TaxonNameDaoHibernateImpl.class);
+
+ // Collaborating DAO: used to cascade-delete taxa attached to a name (see delete()).
+ @Autowired
+ private ITaxonDao taxonDao;
+
+ @Autowired
+
+ // NOTE(review): the blank line between @Autowired and this field is legal (the
+ // annotation still binds to the declaration below) but confusing - consider removing.
+ private IHomotypicalGroupDao homotypicalGroupDao;
+
+ /**
+  * Default constructor. Registers TaxonNameBase as the DAO's entity type and
+  * lists the concrete name subclasses covered by full-text indexing.
+  */
+ public TaxonNameDaoHibernateImpl() {
+ super(TaxonNameBase.class);
+ indexedClasses = new Class[6];
+ indexedClasses[0] = BacterialName.class;
+ indexedClasses[1] = BotanicalName.class;
+ indexedClasses[2] = CultivarPlantName.class;
+ indexedClasses[3] = NonViralName.class;
+ indexedClasses[4] = ViralName.class;
+ indexedClasses[5] = ZoologicalName.class;
+ }
+
+ /**
+  * Counts HybridRelationships having the given name on their "relatedFrom" side,
+  * optionally restricted to one HybridRelationshipType. Uses an Envers audit
+  * query when the audit context points at a prior revision.
+  */
+ @Override
+ public int countHybridNames(NonViralName name, HybridRelationshipType type) {
+ AuditEvent auditEvent = getAuditEventFromContext();
+ if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {
+ Query query = null;
+ if(type == null) {
+ query = getSession().createQuery("select count(relation) from HybridRelationship relation where relation.relatedFrom = :name");
+ } else {
+ query = getSession().createQuery("select count(relation) from HybridRelationship relation where relation.relatedFrom = :name and relation.type = :type");
+ query.setParameter("type", type);
+ }
+ query.setParameter("name",name);
+ return ((Long)query.uniqueResult()).intValue();
+ } else {
+ // Prior view: count via Envers at the stored revision number.
+ AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(HybridRelationship.class,auditEvent.getRevisionNumber());
+ query.add(AuditEntity.relatedId("relatedFrom").eq(name.getId()));
+ query.addProjection(AuditEntity.id().count());
+
+ if(type != null) {
+ query.add(AuditEntity.relatedId("type").eq(type.getId()));
+ }
+
+ return ((Long)query.getSingleResult()).intValue();
+ }
+ }
+
+ /**
+  * Counts names whose nameCache matches queryString via a case-insensitive LIKE;
+  * a null queryString counts all names. Current view only (not audit-aware).
+  */
+ @Override
+ public int countNames(String queryString) {
+ checkNotInPriorView("TaxonNameDaoHibernateImpl.countNames(String queryString)");
+ Criteria criteria = getSession().createCriteria(TaxonNameBase.class);
+
+ if (queryString != null) {
+ criteria.add(Restrictions.ilike("nameCache", queryString));
+ }
+ criteria.setProjection(Projections.projectionList().add(Projections.rowCount()));
+
+ return ((Number)criteria.uniqueResult()).intValue();
+ }
+
+ /**
+  * Counts names whose nameCache matches queryString under the given MatchMode
+  * (EXACT uses equality, everything else a case-insensitive LIKE with the
+  * MatchMode's wildcard expansion), further filtered by optional criteria.
+  */
+ @Override
+ public int countNames(String queryString, MatchMode matchMode, List<Criterion> criteria) {
+
+ Criteria crit = getSession().createCriteria(type);
+ if (matchMode == MatchMode.EXACT) {
+ crit.add(Restrictions.eq("nameCache", matchMode.queryStringFrom(queryString)));
+ } else {
+ crit.add(Restrictions.ilike("nameCache", matchMode.queryStringFrom(queryString)));
+ }
+ if(criteria != null) {
+ for (Criterion criterion : criteria) {
+ crit.add(criterion);
+ }
+ }
+
+ crit.setProjection(Projections.projectionList().add(Projections.rowCount()));
+ return ((Number)crit.uniqueResult()).intValue();
+ }
+
+ /**
+  * Counts names matching the given epithets and rank exactly. A null epithet
+  * argument matches names where that field IS NULL (not "any value"); only a
+  * null rank means "any rank". Audit-aware: uses Envers for prior views.
+  */
+ @Override
+ public int countNames(String genusOrUninomial, String infraGenericEpithet, String specificEpithet, String infraSpecificEpithet, Rank rank) {
+ AuditEvent auditEvent = getAuditEventFromContext();
+ if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {
+ Criteria criteria = getSession().createCriteria(TaxonNameBase.class);
+
+ /**
+ * Given HHH-2951 - "Restrictions.eq when passed null, should create a NullRestriction"
+ * We need to convert nulls to NullRestrictions for now
+ */
+ if(genusOrUninomial != null) {
+ criteria.add(Restrictions.eq("genusOrUninomial",genusOrUninomial));
+ } else {
+ criteria.add(Restrictions.isNull("genusOrUninomial"));
+ }
+
+ if(infraGenericEpithet != null) {
+ criteria.add(Restrictions.eq("infraGenericEpithet", infraGenericEpithet));
+ } else {
+ criteria.add(Restrictions.isNull("infraGenericEpithet"));
+ }
+
+ if(specificEpithet != null) {
+ criteria.add(Restrictions.eq("specificEpithet", specificEpithet));
+ } else {
+ criteria.add(Restrictions.isNull("specificEpithet"));
+ }
+
+ if(infraSpecificEpithet != null) {
+ criteria.add(Restrictions.eq("infraSpecificEpithet",infraSpecificEpithet));
+ } else {
+ criteria.add(Restrictions.isNull("infraSpecificEpithet"));
+ }
+
+ if(rank != null) {
+ criteria.add(Restrictions.eq("rank", rank));
+ }
+
+ criteria.setProjection(Projections.rowCount());
+ return ((Number)criteria.uniqueResult()).intValue();
+ } else {
+ // Audit view: mirror the same null-to-isNull conversion with Envers properties.
+ AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(TaxonNameBase.class,auditEvent.getRevisionNumber());
+
+ if(genusOrUninomial != null) {
+ query.add(AuditEntity.property("genusOrUninomial").eq(genusOrUninomial));
+ } else {
+ query.add(AuditEntity.property("genusOrUninomial").isNull());
+ }
+
+ if(infraGenericEpithet != null) {
+ query.add(AuditEntity.property("infraGenericEpithet").eq(infraGenericEpithet));
+ } else {
+ query.add(AuditEntity.property("infraGenericEpithet").isNull());
+ }
+
+ if(specificEpithet != null) {
+ query.add(AuditEntity.property("specificEpithet").eq(specificEpithet));
+ } else {
+ query.add(AuditEntity.property("specificEpithet").isNull());
+ }
+
+ if(infraSpecificEpithet != null) {
+ query.add(AuditEntity.property("infraSpecificEpithet").eq(infraSpecificEpithet));
+ } else {
+ query.add(AuditEntity.property("infraSpecificEpithet").isNull());
+ }
+
+ if(rank != null) {
+ query.add(AuditEntity.relatedId("rank").eq(rank.getId()));
+ }
+
+ query.addProjection(AuditEntity.id().count());
+ return ((Long)query.getSingleResult()).intValue();
+ }
+ }
+
+ /**
+  * Counts NameRelationships in which the name participates on the given side,
+  * optionally restricted to one type. Audit-aware.
+  *
+  * NOTE(review): direction is interpolated into the HQL as a property name
+  * (expected: relatedFrom/relatedTo) and is assumed non-null - a null direction
+  * would produce invalid HQL ("relation.null"). TODO confirm callers guarantee this.
+  */
+ @Override
+ public int countNameRelationships(TaxonNameBase name, NameRelationship.Direction direction, NameRelationshipType type) {
+
+ AuditEvent auditEvent = getAuditEventFromContext();
+ if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {
+ Query query = null;
+ if(type == null) {
+ query = getSession().createQuery("select count(relation) from NameRelationship relation where relation." + direction +" = :name");
+ } else {
+ query = getSession().createQuery("select count(relation) from NameRelationship relation where relation." + direction +" = :name and relation.type = :type");
+ query.setParameter("type", type);
+ }
+ query.setParameter("name",name);
+ return ((Long)query.uniqueResult()).intValue();
+ } else {
+ AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(NameRelationship.class,auditEvent.getRevisionNumber());
+ query.add(AuditEntity.relatedId(direction.toString()).eq(name.getId()));
+ query.addProjection(AuditEntity.id().count());
+
+ if(type != null) {
+ query.add(AuditEntity.relatedId("type").eq(type.getId()));
+ }
+
+ return ((Long)query.getSingleResult()).intValue();
+ }
+ }
+
+
+ /**
+  * Counts type designations whose typified names include the given name,
+  * optionally restricted to one SpecimenTypeDesignationStatus. Current view only.
+  */
+ @Override
+ public int countTypeDesignations(TaxonNameBase name, SpecimenTypeDesignationStatus status) {
+ checkNotInPriorView("countTypeDesignations(TaxonNameBase name, SpecimenTypeDesignationStatus status)");
+ Query query = null;
+ if(status == null) {
+ query = getSession().createQuery("select count(designation) from TypeDesignationBase designation join designation.typifiedNames name where name = :name");
+ } else {
+ query = getSession().createQuery("select count(designation) from TypeDesignationBase designation join designation.typifiedNames name where name = :name and designation.typeStatus = :status");
+ query.setParameter("status", status);
+ }
+ query.setParameter("name",name);
+ return ((Long)query.uniqueResult()).intValue();
+ }
+
+ /**
+  * Lists HybridRelationships having the given name on their "relatedFrom" side,
+  * optionally filtered by type, with optional paging and ordering. Audit-aware.
+  * Results are initialized along propertyPaths before being returned.
+  */
+ @Override
+ public List<HybridRelationship> getHybridNames(NonViralName name, HybridRelationshipType type, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+ AuditEvent auditEvent = getAuditEventFromContext();
+ if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {
+ Criteria criteria = getSession().createCriteria(HybridRelationship.class);
+ criteria.add(Restrictions.eq("relatedFrom", name));
+ if(type != null) {
+ criteria.add(Restrictions.eq("type", type));
+ }
+
+ // Paging: a null pageNumber means first page.
+ if(pageSize != null) {
+ criteria.setMaxResults(pageSize);
+ if(pageNumber != null) {
+ criteria.setFirstResult(pageNumber * pageSize);
+ } else {
+ criteria.setFirstResult(0);
+ }
+ }
+
+ addOrder(criteria, orderHints);
+
+ List<HybridRelationship> results = criteria.list();
+ defaultBeanInitializer.initializeAll(results, propertyPaths);
+ return results;
+ } else {
+ AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(HybridRelationship.class,auditEvent.getRevisionNumber());
+ query.add(AuditEntity.relatedId("relatedFrom").eq(name.getId()));
+
+ if(type != null) {
+ query.add(AuditEntity.relatedId("type").eq(type.getId()));
+ }
+
+ if(pageSize != null) {
+ query.setMaxResults(pageSize);
+ if(pageNumber != null) {
+ query.setFirstResult(pageNumber * pageSize);
+ } else {
+ query.setFirstResult(0);
+ }
+ }
+
+ List<HybridRelationship> results = query.getResultList();
+ defaultBeanInitializer.initializeAll(results, propertyPaths);
+ return results;
+ }
+ }
+
+    /**
+     * Lists NameRelationships in which the given name participates on the given
+     * side (direction), optionally filtered by relationship type, with optional
+     * paging and ordering. Audit-aware; results are initialized along
+     * propertyPaths before being returned.
+     *
+     * Fix: the original guarded the restriction with
+     * "name != null || direction != null", which throws a NullPointerException
+     * on direction.toString() when direction is null, and builds a broken
+     * eq(..., null) restriction when name is null. The restriction needs BOTH
+     * values, so the guard must be a conjunction.
+     */
+    @Override
+    public List<NameRelationship> getNameRelationships(TaxonNameBase name, NameRelationship.Direction direction,
+            NameRelationshipType type, Integer pageSize, Integer pageNumber, List<OrderHint> orderHints,
+            List<String> propertyPaths) {
+
+        AuditEvent auditEvent = getAuditEventFromContext();
+        if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {
+            Criteria criteria = getSession().createCriteria(NameRelationship.class);
+            // Only restrict by name when both the name and the side to match it on are known.
+            if (name != null && direction != null){
+                criteria.add(Restrictions.eq(direction.toString(), name));
+            }
+            if(type != null) {
+                criteria.add(Restrictions.eq("type", type));
+            }
+
+            // Paging: a null pageNumber means first page.
+            if(pageSize != null) {
+                criteria.setMaxResults(pageSize);
+                if(pageNumber != null) {
+                    criteria.setFirstResult(pageNumber * pageSize);
+                } else {
+                    criteria.setFirstResult(0);
+                }
+            }
+            addOrder(criteria, orderHints);
+
+            List<NameRelationship> results = criteria.list();
+            defaultBeanInitializer.initializeAll(results, propertyPaths);
+            return results;
+        } else {
+            // NOTE(review): the audit branch still assumes name and direction are
+            // non-null, as the original did - TODO confirm callers guarantee this.
+            AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(NameRelationship.class,auditEvent.getRevisionNumber());
+            query.add(AuditEntity.relatedId(direction.toString()).eq(name.getId()));
+
+            if(type != null) {
+                query.add(AuditEntity.relatedId("type").eq(type.getId()));
+            }
+
+            if(pageSize != null) {
+                query.setMaxResults(pageSize);
+                if(pageNumber != null) {
+                    query.setFirstResult(pageNumber * pageSize);
+                } else {
+                    query.setFirstResult(0);
+                }
+            }
+
+            List<NameRelationship> results = query.getResultList();
+            defaultBeanInitializer.initializeAll(results, propertyPaths);
+            return results;
+        }
+    }
+
+ /** Convenience overload: all designation classes (delegates with type == null). */
+ @Override
+ public List<TypeDesignationBase> getTypeDesignations(TaxonNameBase name, TypeDesignationStatusBase status, Integer pageSize, Integer pageNumber, List<String> propertyPaths){
+ return getTypeDesignations(name, null, status, pageSize, pageNumber, propertyPaths);
+ }
+
+ /**
+  * Lists type designations typifying the given name, optionally restricted to a
+  * designation subclass (matched via the HQL .class discriminator by simple
+  * class name) and/or a type status, with optional paging. Current view only.
+  */
+ @Override
+ public <T extends TypeDesignationBase> List<T> getTypeDesignations(TaxonNameBase name,
+ Class<T> type,
+ TypeDesignationStatusBase status, Integer pageSize, Integer pageNumber,
+ List<String> propertyPaths){
+ checkNotInPriorView("getTypeDesignations(TaxonNameBase name,TypeDesignationStatusBase status, Integer pageSize, Integer pageNumber, List<String> propertyPaths)");
+ Query query = null;
+ // Build the HQL string first, then bind only the parameters that were added.
+ String queryString = "select designation from TypeDesignationBase designation join designation.typifiedNames name where name = :name";
+
+ if(status != null) {
+ queryString += " and designation.typeStatus = :status";
+ }
+ if(type != null){
+ queryString += " and designation.class = :type";
+ }
+
+ query = getSession().createQuery(queryString);
+
+ if(status != null) {
+ query.setParameter("status", status);
+ }
+ if(type != null){
+ query.setParameter("type", type.getSimpleName());
+ }
+
+ query.setParameter("name",name);
+
+ if(pageSize != null) {
+ query.setMaxResults(pageSize);
+ if(pageNumber != null) {
+ query.setFirstResult(pageNumber * pageSize);
+ } else {
+ query.setFirstResult(0);
+ }
+ }
+ return defaultBeanInitializer.initializeAll((List<T>)query.list(), propertyPaths);
+ }
+
+
+ /**
+  * Searches names by nameCache with a case-insensitive LIKE and optional paging.
+  * Current view only.
+  *
+  * NOTE(review): the matchMode parameter is accepted but never used - the raw
+  * queryString is passed to ilike without matchMode.queryStringFrom(...). The
+  * 3-arg overload passes MatchMode.BEGINNING, which is therefore ignored too.
+  * Left unchanged here because "fixing" it would change caller-visible results;
+  * TODO confirm intended semantics before wiring matchMode in.
+  */
+ public List<TaxonNameBase<?,?>> searchNames(String queryString, MatchMode matchMode, Integer pageSize, Integer pageNumber) {
+ checkNotInPriorView("TaxonNameDaoHibernateImpl.searchNames(String queryString, Integer pageSize, Integer pageNumber)");
+ Criteria criteria = getSession().createCriteria(TaxonNameBase.class);
+
+ if (queryString != null) {
+ criteria.add(Restrictions.ilike("nameCache", queryString));
+ }
+ if(pageSize != null) {
+ criteria.setMaxResults(pageSize);
+ if(pageNumber != null) {
+ criteria.setFirstResult(pageNumber * pageSize);
+ } else {
+ criteria.setFirstResult(0);
+ }
+ }
+ List<TaxonNameBase<?,?>> results = criteria.list();
+ return results;
+ }
+
+
+ /** Interface overload: delegates with MatchMode.BEGINNING. */
+ @Override
+ public List<TaxonNameBase<?,?>> searchNames(String queryString, Integer pageSize, Integer pageNumber) {
+ return searchNames(queryString, MatchMode.BEGINNING, pageSize, pageNumber);
+ }
+
+
+ /**
+  * Searches names by exact epithets and rank. As in countNames(...), a null
+  * epithet argument matches names where that field IS NULL; only a null rank
+  * means "any rank". Supports paging, ordering and propertyPath initialization.
+  * Audit-aware: uses Envers for prior views.
+  */
+ @Override
+ public List<TaxonNameBase> searchNames(String genusOrUninomial,String infraGenericEpithet, String specificEpithet, String infraSpecificEpithet, Rank rank, Integer pageSize,Integer pageNumber, List<OrderHint> orderHints,
+ List<String> propertyPaths) {
+ AuditEvent auditEvent = getAuditEventFromContext();
+ if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {
+ Criteria criteria = getSession().createCriteria(TaxonNameBase.class);
+
+ /**
+ * Given HHH-2951 - "Restrictions.eq when passed null, should create a NullRestriction"
+ * We need to convert nulls to NullRestrictions for now
+ */
+ if(genusOrUninomial != null) {
+ criteria.add(Restrictions.eq("genusOrUninomial",genusOrUninomial));
+ } else {
+ criteria.add(Restrictions.isNull("genusOrUninomial"));
+ }
+
+ if(infraGenericEpithet != null) {
+ criteria.add(Restrictions.eq("infraGenericEpithet", infraGenericEpithet));
+ } else {
+ criteria.add(Restrictions.isNull("infraGenericEpithet"));
+ }
+
+ if(specificEpithet != null) {
+ criteria.add(Restrictions.eq("specificEpithet", specificEpithet));
+ } else {
+ criteria.add(Restrictions.isNull("specificEpithet"));
+ }
+
+ if(infraSpecificEpithet != null) {
+ criteria.add(Restrictions.eq("infraSpecificEpithet",infraSpecificEpithet));
+ } else {
+ criteria.add(Restrictions.isNull("infraSpecificEpithet"));
+ }
+
+ if(rank != null) {
+ criteria.add(Restrictions.eq("rank", rank));
+ }
+
+ // Paging: a null pageNumber means first page.
+ if(pageSize != null) {
+ criteria.setMaxResults(pageSize);
+ if(pageNumber != null) {
+ criteria.setFirstResult(pageNumber * pageSize);
+ } else {
+ criteria.setFirstResult(0);
+ }
+ }
+
+ addOrder(criteria, orderHints);
+
+ List<TaxonNameBase> results = criteria.list();
+ defaultBeanInitializer.initializeAll(results, propertyPaths);
+ return results;
+ } else {
+ // Audit view: same null-to-isNull conversion using Envers properties.
+ AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(TaxonNameBase.class,auditEvent.getRevisionNumber());
+
+ if(genusOrUninomial != null) {
+ query.add(AuditEntity.property("genusOrUninomial").eq(genusOrUninomial));
+ } else {
+ query.add(AuditEntity.property("genusOrUninomial").isNull());
+ }
+
+ if(infraGenericEpithet != null) {
+ query.add(AuditEntity.property("infraGenericEpithet").eq(infraGenericEpithet));
+ } else {
+ query.add(AuditEntity.property("infraGenericEpithet").isNull());
+ }
+
+ if(specificEpithet != null) {
+ query.add(AuditEntity.property("specificEpithet").eq(specificEpithet));
+ } else {
+ query.add(AuditEntity.property("specificEpithet").isNull());
+ }
+
+ if(infraSpecificEpithet != null) {
+ query.add(AuditEntity.property("infraSpecificEpithet").eq(infraSpecificEpithet));
+ } else {
+ query.add(AuditEntity.property("infraSpecificEpithet").isNull());
+ }
+
+ if(rank != null) {
+ query.add(AuditEntity.relatedId("rank").eq(rank.getId()));
+ }
+
+ if(pageSize != null) {
+ query.setMaxResults(pageSize);
+ if(pageNumber != null) {
+ query.setFirstResult(pageNumber * pageSize);
+ } else {
+ query.setFirstResult(0);
+ }
+ }
+
+ List<TaxonNameBase> results = query.getResultList();
+ defaultBeanInitializer.initializeAll(results, propertyPaths);
+ return results;
+ }
+ }
+
+ /**
+  * Finds names whose nameCache matches queryString under matchmode (EXACT uses
+  * equality, otherwise case-insensitive LIKE with wildcard expansion), filtered
+  * by optional criteria, ordered by nameCache ascending, with optional paging.
+  */
+ @Override
+ public List<? extends TaxonNameBase<?,?>> findByName(String queryString,
+ MatchMode matchmode, Integer pageSize, Integer pageNumber, List<Criterion> criteria, List<String> propertyPaths) {
+
+ Criteria crit = getSession().createCriteria(type);
+ if (matchmode == MatchMode.EXACT) {
+ crit.add(Restrictions.eq("nameCache", matchmode.queryStringFrom(queryString)));
+ } else {
+ crit.add(Restrictions.ilike("nameCache", matchmode.queryStringFrom(queryString)));
+ }
+ if(criteria != null){
+ for (Criterion criterion : criteria) {
+ crit.add(criterion);
+ }
+ }
+ crit.addOrder(Order.asc("nameCache"));
+
+ if(pageSize != null) {
+ crit.setMaxResults(pageSize);
+ if(pageNumber != null) {
+ crit.setFirstResult(pageNumber * pageSize);
+ }
+ }
+
+ List<? extends TaxonNameBase<?,?>> results = crit.list();
+ defaultBeanInitializer.initializeAll(results, propertyPaths);
+
+ return results;
+ }
+
+ /**
+  * Finds names whose titleCache matches queryString under matchmode; same
+  * contract as findByName but against titleCache, ordered by titleCache.
+  */
+ @Override
+ public List<? extends TaxonNameBase<?,?>> findByTitle(String queryString,
+ MatchMode matchmode, Integer pageSize, Integer pageNumber, List<Criterion> criteria, List<String> propertyPaths) {
+
+ Criteria crit = getSession().createCriteria(type);
+ if (matchmode == MatchMode.EXACT) {
+ crit.add(Restrictions.eq("titleCache", matchmode.queryStringFrom(queryString)));
+ } else {
+ crit.add(Restrictions.ilike("titleCache", matchmode.queryStringFrom(queryString)));
+ }
+ if(criteria != null){
+ for (Criterion criterion : criteria) {
+ crit.add(criterion);
+ }
+ }
+ crit.addOrder(Order.asc("titleCache"));
+
+ if(pageSize != null) {
+ crit.setMaxResults(pageSize);
+ if(pageNumber != null) {
+ crit.setFirstResult(pageNumber * pageSize);
+ }
+ }
+
+ List<? extends TaxonNameBase<?,?>> results = crit.list();
+ defaultBeanInitializer.initializeAll(results, propertyPaths);
+
+ return results;
+ }
+
+
+ /**
+  * Loads the single name with the given UUID, applying optional extra criteria
+  * and initializing it along propertyPaths. Returns null (with a log entry) for
+  * a null UUID, no match, or - defensively - multiple matches.
+  */
+ @Override
+ public TaxonNameBase<?,?> findByUuid(UUID uuid, List<Criterion> criteria, List<String> propertyPaths) {
+
+ Criteria crit = getSession().createCriteria(type);
+
+ if (uuid != null) {
+ crit.add(Restrictions.eq("uuid", uuid));
+ } else {
+ logger.warn("UUID is NULL");
+ return null;
+ }
+ if(criteria != null){
+ for (Criterion criterion : criteria) {
+ crit.add(criterion);
+ }
+ }
+ // NOTE(review): ordering a result restricted to one uuid is a no-op - consider removing.
+ crit.addOrder(Order.asc("uuid"));
+
+ List<? extends TaxonNameBase<?,?>> results = crit.list();
+ if (results.size() == 1) {
+ defaultBeanInitializer.initializeAll(results, propertyPaths);
+ TaxonNameBase<?, ?> taxonName = results.iterator().next();
+ return taxonName;
+ } else if (results.size() > 1) {
+ logger.error("Multiple results for UUID: " + uuid);
+ } else if (results.size() == 0) {
+ logger.info("No results for UUID: " + uuid);
+ }
+
+ return null;
+ }
+
+    /**
+     * Pages over all name-to-name relationships: NameRelationships first
+     * (ordered by id), then HybridRelationships, treating the two result sets
+     * as one concatenated, offset/limit-addressable list. A null limit means
+     * "no limit". Audit-aware.
+     *
+     * Fix: the original computed "limit = limit - result.size()" unconditionally,
+     * throwing a NullPointerException whenever limit was null - despite the
+     * later "if (limit != null)" guard showing null was meant to be supported.
+     */
+    @Override
+    public List<RelationshipBase> getAllRelationships(Integer limit, Integer start) {
+        AuditEvent auditEvent = getAuditEventFromContext();
+        if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {
+            // for some reason the HQL .class discriminator didn't work here so I created this preliminary
+            // implementation for now. Should be cleaned in future.
+            List<RelationshipBase> result = new ArrayList<RelationshipBase>();
+
+            // NOTE(review): a null start would NPE on the comparison below; assumed non-null - TODO confirm.
+            int nameRelSize = countAllRelationships(NameRelationship.class);
+            if (nameRelSize > start){
+                String hql = " FROM %s as rb ORDER BY rb.id ";
+                hql = String.format(hql, NameRelationship.class.getSimpleName());
+                Query query = getSession().createQuery(hql);
+                query.setFirstResult(start);
+                if (limit != null){
+                    query.setMaxResults(limit);
+                }
+                result = query.list();
+            }
+            // Remaining quota for HybridRelationships; null still means unlimited.
+            if (limit != null){
+                limit = limit - result.size();
+            }
+            if (limit == null || limit > 0){
+                String hql = " FROM HybridRelationship as rb ORDER BY rb.id ";
+                Query query = getSession().createQuery(hql);
+                // Offset into the hybrid list: 0 if we already consumed part of it above.
+                start = (nameRelSize > start) ? 0 : (start - nameRelSize);
+                query.setFirstResult(start);
+                if (limit != null){
+                    query.setMaxResults(limit);
+                }
+                result.addAll( query.list());
+            }
+            return result;
+        } else {
+            AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(RelationshipBase.class,auditEvent.getRevisionNumber());
+            return query.getResultList();
+        }
+    }
+
+
+    /**
+     * Counts NameRelationships and/or HybridRelationships; a null clazz counts
+     * both kinds together. Rejects any other relationship class.
+     * TODO not yet in interface
+     *
+     * Fix: the rejection message wrongly said "taxon or snonym relation"
+     * (copy/paste from the taxon DAO plus a typo); it now names the classes
+     * this method actually accepts.
+     * @param clazz relationship class to count, or null for both
+     * @return total row count
+     */
+    public int countAllRelationships(Class<? extends RelationshipBase> clazz) {
+        if (clazz != null && ! NameRelationship.class.isAssignableFrom(clazz) && ! HybridRelationship.class.isAssignableFrom(clazz) ){
+            throw new RuntimeException("Class must be assignable from NameRelationship or HybridRelationship");
+        }
+        int size = 0;
+
+        if (clazz == null || NameRelationship.class.isAssignableFrom(clazz)){
+            String hql = " SELECT count(rel) FROM NameRelationship rel";
+            size += (Long)getSession().createQuery(hql).list().get(0);
+        }
+        if (clazz == null || HybridRelationship.class.isAssignableFrom(clazz)){
+            String hql = " SELECT count(rel) FROM HybridRelationship rel";
+            size += (Long)getSession().createQuery(hql).list().get(0);
+        }
+        return size;
+    }
+
+
+ /**
+  * Counts names matching queryString/matchmode/criteria by materializing the
+  * full result list and taking its size - the existing TODO stands: replace
+  * with a count projection for performance.
+  */
+ @Override
+ public Integer countByName(String queryString, MatchMode matchmode, List<Criterion> criteria) {
+ //TODO improve performance
+ List<? extends TaxonNameBase<?,?>> results = findByName(queryString, matchmode, null, null, criteria, null);
+ return results.size();
+
+ }
+
+ /**
+  * Returns lightweight (uuid, id, fullTitleCache) tuples for every name via a
+  * native SQL query against the TaxonNameBase table.
+  *
+  * NOTE(review): returns null - not an empty list - when the table is empty;
+  * callers appear to depend on this, so it is documented rather than changed.
+  */
+ @Override
+ public List<UuidAndTitleCache> getUuidAndTitleCacheOfNames() {
+ String queryString = "SELECT uuid, id, fullTitleCache FROM TaxonNameBase";
+
+ @SuppressWarnings("unchecked")
+ List<Object[]> result = getSession().createSQLQuery(queryString).list();
+
+ if(result.size() == 0){
+ return null;
+ }else{
+ List<UuidAndTitleCache> list = new ArrayList<UuidAndTitleCache>(result.size());
+
+ for (Object object : result){
+
+ Object[] objectArray = (Object[]) object;
+
+ UUID uuid = UUID.fromString((String) objectArray[0]);
+ Integer id = (Integer) objectArray[1];
+ String titleCache = (String) objectArray[2];
+
+ list.add(new UuidAndTitleCache(type, uuid, id, titleCache));
+ }
+
+ return list;
+ }
+ }
+
+ /** Counts names of the given subclass matching queryString on nameCache (delegates to the generic param counter). */
+ @Override
+ public Integer countByName(Class<? extends TaxonNameBase> clazz,String queryString, MatchMode matchmode, List<Criterion> criteria) {
+ return super.countByParam(clazz, "nameCache", queryString, matchmode, criteria);
+ }
+
+ /** Finds names of the given subclass matching queryString on nameCache (delegates to the generic param finder). */
+ @Override
+ public List<TaxonNameBase> findByName(Class<? extends TaxonNameBase> clazz, String queryString, MatchMode matchmode, List<Criterion> criteria,Integer pageSize, Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths) {
+ return super.findByParam(clazz, "nameCache", queryString, matchmode, criteria, pageSize, pageNumber, orderHints, propertyPaths);
+ }
+
+    /**
+     * Deletes a name together with all taxa that use it, and its homotypical
+     * group if the group becomes empty. Returns the deleted name's UUID, or
+     * null when the argument is null.
+     *
+     * Fix: the original called persistentObject.getTaxonBases() BEFORE the
+     * null check, so a null argument threw a NullPointerException and the
+     * guard (warn + return null) was unreachable. The guard now runs first.
+     */
+    @Override
+    public UUID delete (TaxonNameBase persistentObject){
+        if (persistentObject == null){
+            logger.warn(type.getName() + " was 'null'");
+            return null;
+        }
+        Set<TaxonBase> taxonBases = persistentObject.getTaxonBases();
+
+        // Re-attach and reload so we operate on a session-managed instance.
+        getSession().saveOrUpdate(persistentObject);
+        UUID persUuid = persistentObject.getUuid();
+        persistentObject = this.load(persUuid);
+        UUID homotypicalGroupUUID = persistentObject.getHomotypicalGroup().getUuid();
+
+        // Delete the taxa referencing this name first.
+        for (TaxonBase taxonBase: taxonBases){
+            taxonDao.delete(taxonBase);
+        }
+        // Drop the homotypical group if this name was its last typified member.
+        HomotypicalGroup homotypicalGroup = homotypicalGroupDao.load(homotypicalGroupUUID);
+        if (homotypicalGroup != null){
+            if (homotypicalGroup.getTypifiedNames().isEmpty()){
+                homotypicalGroupDao.delete(homotypicalGroup);
+            }
+        }
+
+        getSession().delete(persistentObject);
+        return persistentObject.getUuid();
+    }
+
+
+ /**
+  * Loads the name with the given UUID and returns it as a ZoologicalName, or
+  * null (with a log entry) when the UUID is null, unmatched, ambiguous, or
+  * belongs to a non-zoological name.
+  */
+ @Override
+ public ZoologicalName findZoologicalNameByUUID(UUID uuid){
+ Criteria criteria = getSession().createCriteria(type);
+ if (uuid != null) {
+ criteria.add(Restrictions.eq("uuid", uuid));
+ } else {
+ logger.warn("UUID is NULL");
+ return null;
+ }
+
+ List<? extends TaxonNameBase<?,?>> results = criteria.list();
+ if (results.size() == 1) {
+ defaultBeanInitializer.initializeAll(results, null);
+ TaxonNameBase<?, ?> taxonName = results.iterator().next();
+ // Unwrap any Hibernate proxy before the subtype check.
+ if (taxonName.isInstanceOf(ZoologicalName.class)) {
+ ZoologicalName zoologicalName = CdmBase.deproxy(taxonName, ZoologicalName.class);
+ return zoologicalName;
+ } else {
+ logger.warn("This UUID (" + uuid + ") does not belong to a ZoologicalName. It belongs to: " + taxonName.getUuid() + " (" + taxonName.getTitleCache() + ")");
+ }
+ } else if (results.size() > 1) {
+ logger.error("Multiple results for UUID: " + uuid);
+ } else if (results.size() == 0) {
+ logger.info("No results for UUID: " + uuid);
+ }
+ return null;
+ }
+@Override
+public List<HashMap<String,String>> getNameRecords(){
+    // Native SQL report query: one result row per TaxonBase, joined with its
+    // name, the nomenclatural (in-)reference, any name-type designation and -
+    // via SynonymRelationship - the accepted taxon. The two correlated
+    // sub-selects resolve the family name (rank_id = 795) of the taxon resp.
+    // of its accepted taxon by matching an ancestor TaxonNode through the
+    // treeIndex prefix.
+    String sql = "SELECT"
+            + " (SELECT famName.namecache FROM TaxonNode famNode"
+            + " LEFT OUTER JOIN TaxonBase famTax ON famNode.taxon_id = famTax.id"
+            + " LEFT OUTER JOIN TaxonNameBase famName ON famTax.name_id = famName.id"
+            + " WHERE famName.rank_id = 795 AND famNode.treeIndex = SUBSTRING(tn.treeIndex, 1, length(famNode.treeIndex))"
+            + " ) as famName, "
+            + " (SELECT famName.namecache FROM TaxonNode famNode "
+            + " LEFT OUTER JOIN TaxonBase famTax ON famNode.taxon_id = famTax.id "
+            + " LEFT OUTER JOIN TaxonNameBase famName ON famTax.name_id = famName.id "
+            + " WHERE famName.rank_id = 795 AND famNode.treeIndex = SUBSTRING(tnAcc.treeIndex, 1, length(famNode.treeIndex))"
+            + " ) as accFamName,tb.DTYPE, tb.id as TaxonID ,tb.titleCache taxonTitle, tnb.rank_id as RankID, tnb.id as NameID,"
+            + " tnb.namecache as name, tnb.titleCache as nameAuthor, tnb.fullTitleCache nameAndNomRef,"
+            + " r.titleCache as nomRef, r.abbrevTitle nomRefAbbrevTitle, r.title nomRefTitle, r.datepublished_start nomRefPublishedStart, r.datepublished_end nomRefPublishedEnd, r.pages nomRefPages, inRef.abbrevTitle inRefAbbrevTitle,tnb.nomenclaturalmicroreference as detail,"
+            + " nameType.namecache nameType, nameType.titleCache nameTypeAuthor, nameType.fullTitleCache nameTypeFullTitle, nameTypeRef.titleCache nameTypeRef, inRef.seriespart as inRefSeries, inRef.datepublished_start inRefPublishedStart, inRef.datepublished_end inRefPublishedEnd, inRef.volume as inRefVolume"
+            + " FROM TaxonBase tb"
+            + " LEFT OUTER JOIN TaxonNameBase tnb ON tb.name_id = tnb.id"
+            + " LEFT OUTER JOIN Reference r ON tnb.nomenclaturalreference_id = r.id"
+            + " LEFT OUTER JOIN TaxonNode tn ON tn.taxon_id = tb.id"
+            + " LEFT OUTER JOIN TaxonNameBase_TypeDesignationBase typeMN ON typeMN.TaxonNameBase_id = tnb.id"
+            + " LEFT OUTER JOIN TypeDesignationBase tdb ON tdb.id = typeMN.typedesignations_id"
+            + " LEFT OUTER JOIN TaxonNameBase nameType ON tdb.typename_id = nameType.id"
+            + " LEFT OUTER JOIN Reference nameTypeRef ON nameType.nomenclaturalreference_id = nameTypeRef.id"
+            + " LEFT OUTER JOIN Reference inRef ON inRef.id = r.inreference_id"
+            + " LEFT OUTER JOIN SynonymRelationship sr ON tb.id = sr.relatedfrom_id"
+            + " LEFT OUTER JOIN TaxonBase accT ON accT.id = sr.relatedto_id"
+            + " LEFT OUTER JOIN TaxonNode tnAcc ON tnAcc.taxon_id = accT.id"
+            + " ORDER BY DTYPE, famName, accFamName, tnb.rank_id ,tb.titleCache";
+
+    SQLQuery query = getSession().createSQLQuery(sql);
+    // unchecked: SQLQuery.list() is raw; each row is an Object[] in SELECT order
+    @SuppressWarnings("unchecked")
+    List<Object[]> result = query.list();
+
+    // One map per row, keyed by the column aliases of the SELECT clause above.
+    List<HashMap<String,String>> nameRecords = new ArrayList<HashMap<String,String>>();
+    for (Object[] row : result) {
+        HashMap<String,String> nameRecord = new HashMap<String,String>();
+        nameRecord.put("famName", (String)row[0]);
+        nameRecord.put("accFamName", (String)row[1]);
+        nameRecord.put("DTYPE", (String)row[2]);
+        // numeric ids rendered via String.valueOf (NOTE: a null id becomes "null")
+        nameRecord.put("TaxonID", String.valueOf(row[3]));
+        nameRecord.put("taxonTitle", (String)row[4]);
+        nameRecord.put("RankID", String.valueOf(row[5]));
+        nameRecord.put("NameID", String.valueOf(row[6]));
+        nameRecord.put("name", (String)row[7]);
+        nameRecord.put("nameAuthor", (String)row[8]);
+        nameRecord.put("nameAndNomRef", (String)row[9]);
+        nameRecord.put("nomRef", (String)row[10]);
+        nameRecord.put("nomRefAbbrevTitle", (String)row[11]);
+        nameRecord.put("nomRefTitle", (String)row[12]);
+        nameRecord.put("nomRefPublishedStart", (String)row[13]);
+        nameRecord.put("nomRefPublishedEnd", (String)row[14]);
+        nameRecord.put("nomRefPages", (String)row[15]);
+        nameRecord.put("inRefAbbrevTitle", (String)row[16]);
+        nameRecord.put("detail", (String)row[17]);
+        nameRecord.put("nameType", (String)row[18]);
+        nameRecord.put("nameTypeAuthor", (String)row[19]);
+        nameRecord.put("nameTypeFullTitle", (String)row[20]);
+        nameRecord.put("nameTypeRef", (String)row[21]);
+        nameRecord.put("inRefSeries", (String)row[22]);
+        nameRecord.put("inRefPublishedStart", (String)row[23]);
+        nameRecord.put("inRefPublishedEnd", (String)row[24]);
+        nameRecord.put("inRefVolume", (String)row[25]);
+        nameRecords.add(nameRecord);
+    }
+
+    return nameRecords;
+    }
+
+
+
+
}
\ No newline at end of file
if(taxonBase != null) {\r
query.add(AuditEntity.relatedId("taxon").eq(taxonBase.getId()));\r
}\r
- query.addProjection(AuditEntity.id().count("id"));\r
+ query.addProjection(AuditEntity.id().count());\r
\r
return ((Long)query.getSingleResult()).intValue();\r
}\r
Query query = prepareTaxaByName(doTaxa, doSynonyms, doMisappliedNames, "nameCache", queryString, classification, matchMode, namedAreas, pageSize, pageNumber, doCount);\r
\r
if (query != null){\r
+ @SuppressWarnings("unchecked")\r
List<TaxonBase> results = query.list();\r
\r
defaultBeanInitializer.initializeAll(results, propertyPaths);\r
} else {\r
AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(TaxonRelationship.class,auditEvent.getRevisionNumber());\r
query.add(AuditEntity.relatedId(direction.toString()).eq(taxon.getId()));\r
- query.addProjection(AuditEntity.id().count("id"));\r
+ query.addProjection(AuditEntity.id().count());\r
\r
if(type != null) {\r
query.add(AuditEntity.relatedId("type").eq(type.getId()));\r
} else {\r
AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(SynonymRelationship.class,auditEvent.getRevisionNumber());\r
query.add(AuditEntity.relatedId("relatedTo").eq(taxon.getId()));\r
- query.addProjection(AuditEntity.id().count("id"));\r
+ query.addProjection(AuditEntity.id().count());\r
\r
if(type != null) {\r
query.add(AuditEntity.relatedId("type").eq(type.getId()));\r
} else {\r
AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(SynonymRelationship.class,auditEvent.getRevisionNumber());\r
query.add(AuditEntity.relatedId("relatedFrom").eq(synonym.getId()));\r
- query.addProjection(AuditEntity.id().count("id"));\r
+ query.addProjection(AuditEntity.id().count());\r
\r
if(type != null) {\r
query.add(AuditEntity.relatedId("type").eq(type.getId()));\r
} else {\r
AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(TaxonRelationship.class,auditEvent.getRevisionNumber());\r
query.add(AuditEntity.relatedId(relatedfrom.toString()).eq(taxonBase.getId()));\r
- query.addProjection(AuditEntity.id().count("id"));\r
+ query.addProjection(AuditEntity.id().count());\r
\r
if(type != null) {\r
query.add(AuditEntity.relatedId("type").eq(type.getId()));\r
-// $Id$\r
-/**\r
- * Copyright (C) 2015 EDIT\r
- * European Distributed Institute of Taxonomy\r
- * http://www.e-taxonomy.eu\r
- *\r
- * The contents of this file are subject to the Mozilla Public License Version 1.1\r
- * See LICENSE.TXT at the top of this package for the full license terms.\r
- */\r
-package eu.etaxonomy.cdm.persistence.dao.jdbc.validation;\r
-\r
-import java.sql.Connection;\r
-import java.sql.Date;\r
-import java.sql.PreparedStatement;\r
-import java.sql.ResultSet;\r
-import java.sql.SQLException;\r
-import java.sql.Types;\r
-import java.util.HashSet;\r
-import java.util.Iterator;\r
-import java.util.Set;\r
-import java.util.UUID;\r
-\r
-import javax.sql.DataSource;\r
-import javax.validation.ConstraintViolation;\r
-\r
-import org.apache.log4j.Logger;\r
-import org.joda.time.DateTime;\r
-import org.springframework.beans.factory.annotation.Autowired;\r
-import org.springframework.stereotype.Repository;\r
-\r
-import eu.etaxonomy.cdm.model.common.ICdmBase;\r
-import eu.etaxonomy.cdm.model.validation.CRUDEventType;\r
-import eu.etaxonomy.cdm.model.validation.EntityConstraintViolation;\r
-import eu.etaxonomy.cdm.model.validation.EntityValidation;\r
-import eu.etaxonomy.cdm.model.validation.EntityValidationStatus;\r
-import eu.etaxonomy.cdm.model.validation.Severity;\r
-import eu.etaxonomy.cdm.persistence.dao.jdbc.JdbcDaoUtils;\r
-import eu.etaxonomy.cdm.persistence.dao.validation.IEntityValidationCrud;\r
-\r
-/**\r
- * @author ayco_holleman\r
- * @date 16 jan. 2015\r
- *\r
- */\r
-@Repository\r
-public class EntityValidationCrudJdbcImpl implements IEntityValidationCrud {\r
-\r
- public static final Logger logger = Logger.getLogger(EntityValidationCrudJdbcImpl.class);\r
-\r
- private static final String SQL_INSERT_VALIDATION_RESULT = "INSERT INTO entityvalidation"\r
- + "(id, created, uuid, crudeventtype, validatedentityclass, validatedentityid,"\r
- + "validatedentityuuid, userfriendlydescription, userfriendlytypename, validationcount,"\r
- + "updated, status, createdby_id) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)";\r
-\r
- private static final int vr_id = 1;\r
- private static final int vr_created = 2;\r
- private static final int vr_uuid = 3;\r
- private static final int vr_crudeventtype = 4;\r
- private static final int vr_validatedentityclass = 5;\r
- private static final int vr_validatedentityid = 6;\r
- private static final int vr_validatedentityuuid = 7;\r
- private static final int vr_userfriendlydescription = 8;\r
- private static final int vr_userfriendlytypename = 9;\r
- private static final int vr_validationcount = 10;\r
- private static final int vr_updated = 11;\r
- private static final int vr_status = 12;\r
- private static final int vr_createdby_id = 13;\r
-\r
- private static final String SQL_INSERT_CONSTRAINT_VIOLATION = "INSERT INTO entityconstraintviolation"\r
- + "(id, created, uuid, invalidvalue, message, propertypath, userfriendlyfieldname, severity,"\r
- + "validator, validationgroup, createdby_id, entityvalidation_id) VALUES (?,?,?,?,?,?,?,?,?,?,?,?)";\r
-\r
- private static final int cv_id = 1;\r
- private static final int cv_created = 2;\r
- private static final int cv_uuid = 3;\r
- private static final int cv_invalidvalue = 4;\r
- private static final int cv_message = 5;\r
- private static final int cv_propertypath = 6;\r
- private static final int cv_userfriendlyfieldname = 7;\r
- private static final int cv_severity = 8;\r
- private static final int cv_validator = 9;\r
- private static final int cv_validationgroup = 10;\r
- private static final int cv_createdby_id = 11;\r
- private static final int cv_entityvalidation_id = 12;\r
-\r
- @Autowired\r
- private DataSource datasource;\r
-\r
- public EntityValidationCrudJdbcImpl() {\r
-\r
- }\r
-\r
- public EntityValidationCrudJdbcImpl(DataSource datasource) {\r
- this.datasource = datasource;\r
- }\r
-\r
- public void setDatasource(DataSource datasource) {\r
- this.datasource = datasource;\r
- }\r
-\r
- @Override\r
- public <T extends ICdmBase> void saveEntityValidation(T validatedEntity, Set<ConstraintViolation<T>> errors,\r
- CRUDEventType crudEventType, Class<?>[] validationGroups) {\r
- saveEntityValidation(createEntityValidation(validatedEntity, errors, crudEventType), validationGroups);\r
- }\r
-\r
- // This is the method that's tested by the unit tests\r
- // rather than the interface method above, because it\r
- // is almost impossible to create a mock instance of\r
- // ConstraintViolation<T>\r
- void saveEntityValidation(EntityValidation newValidation, Class<?>[] validationGroups) {\r
- Connection conn = null;\r
- EntityValidation tmp = null;\r
- try {\r
- conn = datasource.getConnection();\r
- JdbcDaoUtils.startTransaction(conn);\r
- String entityClass = newValidation.getValidatedEntityClass();\r
- int entityId = newValidation.getValidatedEntityId();\r
- EntityValidation oldValidation = getEntityValidation(conn, entityClass, entityId);\r
- if (oldValidation == null) {\r
- tmp = newValidation;\r
- /*\r
- * The entity has never been validated before. We should now\r
- * create an entityvalidation record whether or not the entity\r
- * has errors, because the entity HAS been validated so its\r
- * validationcount is now 1.\r
- */\r
- saveEntityValidationRecord(conn, newValidation);\r
- Set<EntityConstraintViolation> errors = newValidation.getEntityConstraintViolations();\r
- if (errors != null && errors.size() != 0) {\r
- saveErrorRecords(conn, newValidation);\r
- }\r
-\r
- } else {\r
- tmp = oldValidation;\r
- // Increase validation counter\r
- increaseValidationCounter(conn, oldValidation);\r
-\r
- // Delete obsolete errors, that is, errors from the previous\r
- // validation that have disappeared from the new validation\r
- // even though they belong to the same validation group\r
- dontDeleteErrorsInOtherValidationGroups(oldValidation, validationGroups);\r
- // Now all errors have been removed from the previous validation\r
- // that don't belong to the validation group(s) applied by the\r
- // current validation. Set them apart because we need them\r
- HashSet<EntityConstraintViolation> oldErrors = new HashSet<EntityConstraintViolation>(\r
- oldValidation.getEntityConstraintViolations());\r
- oldValidation.getEntityConstraintViolations().removeAll(newValidation.getEntityConstraintViolations());\r
- // Now we're left with previous errors that have disappeared\r
- // from the current validation (they have become obsolete)\r
- deleteObsoleteErrors(conn, oldValidation);\r
-\r
- // From the new errors delete all that are identical to\r
- // errors from a previous validation (identical as per the\r
- // equals() method of EntityConstraintViolation). These\r
- // errors will not replace the old ones in order to limit\r
- // the number of INSERTs.\r
- newValidation.getEntityConstraintViolations().removeAll(oldErrors);\r
- saveErrorRecords(conn, newValidation);\r
- }\r
- conn.commit();\r
- setStatus(conn, tmp, EntityValidationStatus.OK);\r
- } catch (Throwable t) {\r
- logger.error("Error while saving validation result:", t);\r
- setStatus(conn, tmp, EntityValidationStatus.ERROR);\r
- JdbcDaoUtils.rollback(conn);\r
- } finally {\r
- JdbcDaoUtils.close(conn);\r
- }\r
- }\r
-\r
- @Override\r
- public void deleteEntityValidation(String validatedEntityClass, int validatedEntityId) {\r
- Connection conn = null;\r
- try {\r
- conn = datasource.getConnection();\r
- JdbcDaoUtils.startTransaction(conn);\r
- int validationResultId = getValidationResultId(conn, validatedEntityClass, validatedEntityId);\r
- if (validationResultId == -1) {\r
- return;\r
- }\r
- deleteValidationResultRecord(conn, validationResultId);\r
- deletedErrorRecords(conn, validationResultId, null);\r
- conn.commit();\r
- } catch (Throwable t) {\r
- JdbcDaoUtils.rollback(conn);\r
- }\r
- JdbcDaoUtils.close(conn);\r
- }\r
-\r
- private static <T extends ICdmBase> EntityValidation createEntityValidation(T validatedEntity,\r
- Set<ConstraintViolation<T>> errors, CRUDEventType crudEventType) {\r
- EntityValidation entityValidation = EntityValidation.newInstance(validatedEntity, crudEventType);\r
- Set<EntityConstraintViolation> errorEntities = new HashSet<EntityConstraintViolation>(errors.size());\r
- for (ConstraintViolation<T> error : errors) {\r
- EntityConstraintViolation errorEntity = EntityConstraintViolation.newInstance(validatedEntity, error);\r
- errorEntities.add(errorEntity);\r
- }\r
- entityValidation.setEntityConstraintViolations(errorEntities);\r
- return entityValidation;\r
- }\r
-\r
- private static void deletedErrorRecords(Connection conn, int validationResultId, Class<?>[] validationGroups)\r
- throws SQLException {\r
- StringBuilder sql = new StringBuilder(127);\r
- sql.append("DELETE FROM entityconstraintviolation WHERE entityvalidation_id = ?");\r
- if (validationGroups != null && validationGroups.length != 0) {\r
- sql.append(" AND (");\r
- for (int i = 0; i < validationGroups.length; ++i) {\r
- if (i != 0) {\r
- sql.append(" OR ");\r
- }\r
- sql.append("validationgroup = ?");\r
- }\r
- sql.append(")");\r
- }\r
- PreparedStatement stmt = null;\r
- try {\r
- stmt = conn.prepareStatement(sql.toString());\r
- stmt.setInt(1, validationResultId);\r
- if (validationGroups != null && validationGroups.length != 0) {\r
- for (int i = 0; i < validationGroups.length; ++i) {\r
- stmt.setString(i + 2, validationGroups[i].getName());\r
- }\r
- }\r
- stmt.executeUpdate();\r
- } finally {\r
- JdbcDaoUtils.close(stmt);\r
- }\r
- }\r
-\r
- private static void deleteObsoleteErrors(Connection conn, EntityValidation previousValidation) throws SQLException {\r
- Set<EntityConstraintViolation> obsoleteErrors = previousValidation.getEntityConstraintViolations();\r
- if (obsoleteErrors == null || obsoleteErrors.size() == 0) {\r
- return;\r
- }\r
- String sql = "DELETE FROM entityconstraintviolation WHERE id = ?";\r
- PreparedStatement stmt = null;\r
- try {\r
- stmt = conn.prepareStatement(sql.toString());\r
- for (EntityConstraintViolation error : obsoleteErrors) {\r
- stmt.setInt(1, error.getId());\r
- stmt.executeUpdate();\r
- }\r
- } finally {\r
- JdbcDaoUtils.close(stmt);\r
- }\r
- }\r
-\r
- // Save EntityValidation entity to database. As a side effect\r
- // the database id assigned to the entity will be set on the\r
- // EntityValidation instance\r
- private static void saveEntityValidationRecord(Connection conn, EntityValidation newValidation) throws SQLException {\r
- PreparedStatement stmt = null;\r
- try {\r
- stmt = conn.prepareStatement(SQL_INSERT_VALIDATION_RESULT);\r
- if (newValidation.getId() <= 0) {\r
- int id = 10 + JdbcDaoUtils.fetchInt(conn, "SELECT MAX(id) FROM entityvalidation");\r
- newValidation.setId(id);\r
- }\r
- stmt.setInt(vr_id, newValidation.getId());\r
- stmt.setDate(vr_created, new Date(newValidation.getCreated().getMillis()));\r
- stmt.setString(vr_uuid, newValidation.getUuid().toString());\r
- stmt.setString(vr_crudeventtype, newValidation.getCrudEventType().toString());\r
- stmt.setString(vr_validatedentityclass, newValidation.getValidatedEntityClass());\r
- stmt.setInt(vr_validatedentityid, newValidation.getValidatedEntityId());\r
- stmt.setString(vr_validatedentityuuid, newValidation.getValidatedEntityUuid().toString());\r
- stmt.setString(vr_userfriendlydescription, newValidation.getUserFriendlyDescription());\r
- stmt.setString(vr_userfriendlytypename, newValidation.getUserFriendlyTypeName());\r
- stmt.setInt(vr_validationcount, 1);\r
- stmt.setDate(vr_updated, new Date(newValidation.getCreated().getMillis()));\r
- stmt.setString(vr_status, EntityValidationStatus.IN_PROGRESS.toString());\r
- if (newValidation.getCreatedBy() != null) {\r
- stmt.setInt(vr_createdby_id, newValidation.getCreatedBy().getId());\r
- } else {\r
- stmt.setNull(vr_createdby_id, Types.INTEGER);\r
- }\r
- stmt.executeUpdate();\r
- } finally {\r
- JdbcDaoUtils.close(stmt);\r
- }\r
- }\r
-\r
- private static void increaseValidationCounter(Connection conn, EntityValidation entityValidation)\r
- throws SQLException {\r
- String sql = "UPDATE entityvalidation SET crudeventtype=?, validationcount = validationcount + 1, "\r
- + "updated = ?, status = ? WHERE id=?";\r
- PreparedStatement stmt = null;\r
- try {\r
- stmt = conn.prepareStatement(sql);\r
- if (entityValidation.getCrudEventType() == null) {\r
- stmt.setString(1, null);\r
- } else {\r
- stmt.setString(1, entityValidation.getCrudEventType().toString());\r
- }\r
- stmt.setDate(2, new Date(new java.util.Date().getTime()));\r
- stmt.setString(3, EntityValidationStatus.IN_PROGRESS.toString());\r
- stmt.setInt(4, entityValidation.getId());\r
- stmt.executeUpdate();\r
- } finally {\r
- JdbcDaoUtils.close(stmt);\r
- }\r
- }\r
-\r
- private static <T extends ICdmBase> void saveErrorRecords(Connection conn, EntityValidation entityValidation)\r
- throws SQLException {\r
- Set<EntityConstraintViolation> errors = entityValidation.getEntityConstraintViolations();\r
- if (errors == null || errors.size() == 0) {\r
- return;\r
- }\r
- PreparedStatement stmt = null;\r
- try {\r
- stmt = conn.prepareStatement(SQL_INSERT_CONSTRAINT_VIOLATION);\r
- for (EntityConstraintViolation error : errors) {\r
- if (error.getId() <= 0) {\r
- int id = 10 + JdbcDaoUtils.fetchInt(conn, "SELECT MAX(id) FROM entityconstraintviolation");\r
- error.setId(id);\r
- }\r
- stmt.setInt(cv_id, error.getId());\r
- stmt.setDate(cv_created, new Date(error.getCreated().getMillis()));\r
- stmt.setString(cv_uuid, error.getUuid().toString());\r
- stmt.setString(cv_invalidvalue, error.getInvalidValue());\r
- stmt.setString(cv_message, error.getMessage());\r
- stmt.setString(cv_propertypath, error.getPropertyPath());\r
- stmt.setString(cv_userfriendlyfieldname, error.getUserFriendlyFieldName());\r
- stmt.setString(cv_severity, error.getSeverity().toString());\r
- stmt.setString(cv_validator, error.getValidator());\r
- stmt.setString(cv_validationgroup, error.getValidationGroup());\r
- if (error.getCreatedBy() != null) {\r
- stmt.setInt(cv_createdby_id, error.getCreatedBy().getId());\r
- } else {\r
- stmt.setNull(cv_createdby_id, Types.INTEGER);\r
- }\r
- stmt.setInt(cv_entityvalidation_id, entityValidation.getId());\r
- stmt.executeUpdate();\r
- }\r
- } finally {\r
- JdbcDaoUtils.close(stmt);\r
- }\r
- }\r
-\r
- // Called by unit test\r
- EntityValidation getEntityValidation(String validatedEntityClass, int validatedEntityId) {\r
- Connection conn = null;\r
- try {\r
- conn = datasource.getConnection();\r
- JdbcDaoUtils.startTransaction(conn);\r
- EntityValidation result = getEntityValidation(conn, validatedEntityClass, validatedEntityId);\r
- conn.commit();\r
- return result;\r
- } catch (Throwable t) {\r
- logger.error("Error while retrieving validation result", t);\r
- JdbcDaoUtils.rollback(conn);\r
- return null;\r
- }\r
- }\r
-\r
- private static EntityValidation getEntityValidation(Connection conn, String validatedEntityClass,\r
- int validatedEntityId) throws SQLException {\r
- EntityValidation entityValidation = getEntityValidationRecord(conn, validatedEntityClass, validatedEntityId);\r
- if (entityValidation != null) {\r
- entityValidation.setEntityConstraintViolations(getErrorRecords(conn, entityValidation.getId()));\r
- }\r
- return entityValidation;\r
- }\r
-\r
- private static void deleteValidationResultRecord(Connection conn, int validationResultId) throws SQLException {\r
- String sql = "DELETE FROM entityvalidation WHERE id = ?";\r
- PreparedStatement stmt = conn.prepareStatement(sql);\r
- stmt.setInt(1, validationResultId);\r
- stmt.executeUpdate();\r
- }\r
-\r
- private static void setStatus(Connection conn, EntityValidation entityValidation, EntityValidationStatus status) {\r
- if (conn == null || entityValidation == null || entityValidation.getId() <= 0) {\r
- logger.warn("Failed to save entity validation status to database");\r
- return;\r
- }\r
- String sql = "UPDATE entityvalidation SET status = ? WHERE id = ?";\r
- PreparedStatement stmt = null;\r
- try {\r
- JdbcDaoUtils.startTransaction(conn);\r
- stmt = conn.prepareStatement(sql);\r
- stmt.setString(1, status.toString());\r
- stmt.setInt(2, entityValidation.getId());\r
- stmt.executeUpdate();\r
- conn.commit();\r
- } catch (Throwable t) {\r
- logger.error("Failed to set validation status", t);\r
- } finally {\r
- JdbcDaoUtils.close(stmt);\r
- }\r
- }\r
-\r
- private static <T extends ICdmBase> EntityValidation getEntityValidationRecord(Connection conn,\r
- String validatedEntityClass, int validatedEntityId) throws SQLException {\r
- String sql = "SELECT * FROM entityvalidation WHERE validatedentityclass=? AND validatedentityid=?";\r
- EntityValidation result = null;\r
- PreparedStatement stmt = null;\r
- try {\r
- stmt = conn.prepareStatement(sql);\r
- stmt.setString(1, validatedEntityClass);\r
- stmt.setInt(2, validatedEntityId);\r
- ResultSet rs = stmt.executeQuery();\r
- if (rs.next()) {\r
- result = EntityValidation.newInstance();\r
- result.setId(rs.getInt("id"));\r
- Date d = rs.getDate("created");\r
- if (!rs.wasNull()) {\r
- result.setCreated(new DateTime(d.getTime()));\r
- }\r
- String s = rs.getString("uuid");\r
- if (!rs.wasNull()) {\r
- result.setUuid(UUID.fromString(rs.getString("uuid")));\r
- }\r
- s = rs.getString("crudeventtype");\r
- if (!rs.wasNull()) {\r
- result.setCrudEventType(CRUDEventType.valueOf(s));\r
- }\r
- result.setValidatedEntityClass(rs.getString("validatedentityclass"));\r
- result.setValidatedEntityId(rs.getInt("validatedentityid"));\r
- s = rs.getString("validatedentityuuid");\r
- if (!rs.wasNull()) {\r
- result.setValidatedEntityUuid(UUID.fromString(s));\r
- }\r
- result.setUserFriendlyDescription(rs.getString("userfriendlydescription"));\r
- result.setUserFriendlyTypeName(rs.getString("userfriendlytypename"));\r
- }\r
- rs.close();\r
- return result;\r
- } finally {\r
- JdbcDaoUtils.close(stmt);\r
- }\r
- }\r
-\r
- private static int getValidationResultId(Connection conn, String validatedEntityClass, int validatedEntityId)\r
- throws SQLException {\r
- String sql = "SELECT id FROM entityvalidation WHERE validatedentityclass = ? AND validatedentityid = ?";\r
- PreparedStatement stmt = null;\r
- int result = -1;\r
- try {\r
- stmt = conn.prepareStatement(sql);\r
- stmt.setString(1, validatedEntityClass);\r
- stmt.setInt(2, validatedEntityId);\r
- ResultSet rs = stmt.executeQuery();\r
- if (rs.next()) {\r
- result = rs.getInt(1);\r
- }\r
- rs.close();\r
- } finally {\r
- JdbcDaoUtils.close(stmt);\r
- }\r
- return result;\r
- }\r
-\r
- private static Set<EntityConstraintViolation> getErrorRecords(Connection conn, int entityValidationId)\r
- throws SQLException {\r
- return getErrorRecordsForValidationGroup(conn, entityValidationId, null);\r
- }\r
-\r
- private static Set<EntityConstraintViolation> getErrorRecordsForValidationGroup(Connection conn,\r
- int entityValidationId, Class<?>[] validationGroups) throws SQLException {\r
- StringBuilder sql = new StringBuilder("SELECT * FROM entityconstraintviolation WHERE entityvalidation_id=?");\r
- if (validationGroups != null && validationGroups.length != 0) {\r
- sql.append(" AND (");\r
- for (int i = 0; i < validationGroups.length; ++i) {\r
- if (i != 0) {\r
- sql.append(" OR ");\r
- }\r
- sql.append("validationgroup = ?");\r
- }\r
- sql.append(")");\r
- }\r
- PreparedStatement stmt = null;\r
- Set<EntityConstraintViolation> errors = new HashSet<EntityConstraintViolation>();\r
- try {\r
- stmt = conn.prepareStatement(sql.toString());\r
- stmt.setInt(1, entityValidationId);\r
- if (validationGroups != null && validationGroups.length != 0) {\r
- for (int i = 0; i < validationGroups.length; ++i) {\r
- stmt.setString(i + 2, validationGroups[i].getName());\r
- }\r
- }\r
- ResultSet rs = stmt.executeQuery();\r
- while (rs.next()) {\r
- EntityConstraintViolation error = EntityConstraintViolation.newInstance();\r
- error.setId(rs.getInt("id"));\r
- error.setCreated(new DateTime(rs.getDate("created").getTime()));\r
- error.setUuid(UUID.fromString(rs.getString("uuid")));\r
- error.setInvalidValue(rs.getString("invalidvalue"));\r
- error.setMessage(rs.getString("message"));\r
- error.setPropertyPath(rs.getString("propertypath"));\r
- error.setUserFriendlyFieldName(rs.getString("userfriendlyfieldname"));\r
- error.setSeverity(Severity.forName(rs.getString("severity")));\r
- error.setValidator(rs.getString("validator"));\r
- error.setValidationGroup(rs.getString("validationgroup"));\r
- errors.add(error);\r
- }\r
- rs.close();\r
- } finally {\r
- JdbcDaoUtils.close(stmt);\r
- }\r
- return errors;\r
- }\r
-\r
- private static void dontDeleteErrorsInOtherValidationGroups(EntityValidation previousValidation,\r
- Class<?>[] validationGroups) {\r
- Set<String> classNames = new HashSet<String>(validationGroups.length);\r
- for (Class<?> c : validationGroups) {\r
- classNames.add(c.getName());\r
- }\r
- Iterator<EntityConstraintViolation> iterator = previousValidation.getEntityConstraintViolations().iterator();\r
- while (iterator.hasNext()) {\r
- if (!classNames.contains(iterator.next().getValidationGroup())) {\r
- iterator.remove();\r
- }\r
- }\r
- }\r
-\r
-}\r
+// $Id$
+/**
+ * Copyright (C) 2015 EDIT
+ * European Distributed Institute of Taxonomy
+ * http://www.e-taxonomy.eu
+ *
+ * The contents of this file are subject to the Mozilla Public License Version 1.1
+ * See LICENSE.TXT at the top of this package for the full license terms.
+ */
+package eu.etaxonomy.cdm.persistence.dao.jdbc.validation;
+
+import java.sql.Connection;
+import java.sql.Date;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Types;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Set;
+import java.util.UUID;
+
+import javax.sql.DataSource;
+import javax.validation.ConstraintViolation;
+
+import org.apache.log4j.Logger;
+import org.joda.time.DateTime;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Repository;
+
+import eu.etaxonomy.cdm.model.common.ICdmBase;
+import eu.etaxonomy.cdm.model.validation.CRUDEventType;
+import eu.etaxonomy.cdm.model.validation.EntityConstraintViolation;
+import eu.etaxonomy.cdm.model.validation.EntityValidation;
+import eu.etaxonomy.cdm.model.validation.EntityValidationStatus;
+import eu.etaxonomy.cdm.model.validation.Severity;
+import eu.etaxonomy.cdm.persistence.dao.jdbc.JdbcDaoUtils;
+import eu.etaxonomy.cdm.persistence.dao.validation.IEntityValidationCrud;
+
+/**
+ * @author ayco_holleman
+ * @date 16 jan. 2015
+ *
+ */
+@Repository
+public class EntityValidationCrudJdbcImpl implements IEntityValidationCrud {
+
+ public static final Logger logger = Logger.getLogger(EntityValidationCrudJdbcImpl.class);
+
+ private static final String SQL_INSERT_VALIDATION_RESULT = "INSERT INTO entityvalidation"
+ + "(id, created, uuid, crudeventtype, validatedentityclass, validatedentityid,"
+ + "validatedentityuuid, userfriendlydescription, userfriendlytypename, validationcount,"
+ + "updated, status, createdby_id) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)";
+
+ private static final int vr_id = 1;
+ private static final int vr_created = 2;
+ private static final int vr_uuid = 3;
+ private static final int vr_crudeventtype = 4;
+ private static final int vr_validatedentityclass = 5;
+ private static final int vr_validatedentityid = 6;
+ private static final int vr_validatedentityuuid = 7;
+ private static final int vr_userfriendlydescription = 8;
+ private static final int vr_userfriendlytypename = 9;
+ private static final int vr_validationcount = 10;
+ private static final int vr_updated = 11;
+ private static final int vr_status = 12;
+ private static final int vr_createdby_id = 13;
+
+ private static final String SQL_INSERT_CONSTRAINT_VIOLATION = "INSERT INTO entityconstraintviolation"
+ + "(id, created, uuid, invalidvalue, message, propertypath, userfriendlyfieldname, severity,"
+ + "validator, validationgroup, createdby_id, entityvalidation_id) VALUES (?,?,?,?,?,?,?,?,?,?,?,?)";
+
+ private static final int cv_id = 1;
+ private static final int cv_created = 2;
+ private static final int cv_uuid = 3;
+ private static final int cv_invalidvalue = 4;
+ private static final int cv_message = 5;
+ private static final int cv_propertypath = 6;
+ private static final int cv_userfriendlyfieldname = 7;
+ private static final int cv_severity = 8;
+ private static final int cv_validator = 9;
+ private static final int cv_validationgroup = 10;
+ private static final int cv_createdby_id = 11;
+ private static final int cv_entityvalidation_id = 12;
+
+ @Autowired
+ private DataSource datasource;
+
+ public EntityValidationCrudJdbcImpl() {
+
+ }
+
+ public EntityValidationCrudJdbcImpl(DataSource datasource) {
+ this.datasource = datasource;
+ }
+
+ public void setDatasource(DataSource datasource) {
+ this.datasource = datasource;
+ }
+
+ @Override
+ public <T extends ICdmBase> void saveEntityValidation(T validatedEntity, Set<ConstraintViolation<T>> errors,
+ CRUDEventType crudEventType, Class<?>[] validationGroups) {
+ saveEntityValidation(createEntityValidation(validatedEntity, errors, crudEventType), validationGroups);
+ }
+
+    // This is the method that's tested by the unit tests
+    // rather than the interface method above, because it
+    // is almost impossible to create a mock instance of
+    // ConstraintViolation<T>
+    /**
+     * Persists the outcome of one validation run for a single entity within a
+     * single JDBC transaction. First-time validations insert the validation
+     * record plus all error records; repeat validations bump the counter and
+     * reconcile error records (delete obsolete, insert new, keep unchanged to
+     * limit INSERTs). On failure the transaction is rolled back and the
+     * record's status is set to ERROR (best-effort, in its own transaction).
+     */
+    void saveEntityValidation(EntityValidation newValidation, Class<?>[] validationGroups) {
+        Connection conn = null;
+        // tmp holds whichever record (new or pre-existing) ends up in the DB,
+        // so the status update below targets the right row
+        EntityValidation tmp = null;
+        try {
+            conn = datasource.getConnection();
+            JdbcDaoUtils.startTransaction(conn);
+            String entityClass = newValidation.getValidatedEntityClass();
+            int entityId = newValidation.getValidatedEntityId();
+            EntityValidation oldValidation = getEntityValidation(conn, entityClass, entityId);
+            if (oldValidation == null) {
+                tmp = newValidation;
+                /*
+                 * The entity has never been validated before. We should now
+                 * create an entityvalidation record whether or not the entity
+                 * has errors, because the entity HAS been validated so its
+                 * validationcount is now 1.
+                 */
+                saveEntityValidationRecord(conn, newValidation);
+                Set<EntityConstraintViolation> errors = newValidation.getEntityConstraintViolations();
+                if (errors != null && errors.size() != 0) {
+                    saveErrorRecords(conn, newValidation);
+                }
+
+            } else {
+                tmp = oldValidation;
+                // Increase validation counter
+                increaseValidationCounter(conn, oldValidation);
+
+                // Delete obsolete errors, that is, errors from the previous
+                // validation that have disappeared from the new validation
+                // even though they belong to the same validation group
+                dontDeleteErrorsInOtherValidationGroups(oldValidation, validationGroups);
+                // Now all errors have been removed from the previous validation
+                // that don't belong to the validation group(s) applied by the
+                // current validation. Set them apart because we need them
+                HashSet<EntityConstraintViolation> oldErrors = new HashSet<EntityConstraintViolation>(
+                        oldValidation.getEntityConstraintViolations());
+                oldValidation.getEntityConstraintViolations().removeAll(newValidation.getEntityConstraintViolations());
+                // Now we're left with previous errors that have disappeared
+                // from the current validation (they have become obsolete)
+                deleteObsoleteErrors(conn, oldValidation);
+
+                // From the new errors delete all that are identical to
+                // errors from a previous validation (identical as per the
+                // equals() method of EntityConstraintViolation). These
+                // errors will not replace the old ones in order to limit
+                // the number of INSERTs.
+                newValidation.getEntityConstraintViolations().removeAll(oldErrors);
+                saveErrorRecords(conn, newValidation);
+            }
+            conn.commit();
+            // Status is flipped to OK only after a successful commit; setStatus
+            // runs its own mini-transaction on the same connection
+            setStatus(conn, tmp, EntityValidationStatus.OK);
+        } catch (Throwable t) {
+            logger.error("Error while saving validation result:", t);
+            setStatus(conn, tmp, EntityValidationStatus.ERROR);
+            JdbcDaoUtils.rollback(conn);
+        } finally {
+            JdbcDaoUtils.close(conn);
+        }
+    }
+
+    /**
+     * Deletes the entityvalidation record and all its entityconstraintviolation
+     * records for the given entity, in one transaction. A no-op when no
+     * validation result exists for the entity.
+     */
+    @Override
+    public void deleteEntityValidation(String validatedEntityClass, int validatedEntityId) {
+        Connection conn = null;
+        try {
+            conn = datasource.getConnection();
+            JdbcDaoUtils.startTransaction(conn);
+            int validationResultId = getValidationResultId(conn, validatedEntityClass, validatedEntityId);
+            if (validationResultId == -1) {
+                // Nothing to delete; the finally block still closes the connection
+                return;
+            }
+            deleteValidationResultRecord(conn, validationResultId);
+            deletedErrorRecords(conn, validationResultId, null);
+            conn.commit();
+        } catch (Throwable t) {
+            // Previously the throwable was silently swallowed; log it so
+            // failed deletions are visible
+            logger.error("Error while deleting validation result", t);
+            JdbcDaoUtils.rollback(conn);
+        } finally {
+            // close() used to sit after the try/catch, so the early return
+            // above leaked the connection; finally guarantees it always runs
+            JdbcDaoUtils.close(conn);
+        }
+    }
+
+    /**
+     * Builds a transient EntityValidation instance for the validated entity,
+     * with one EntityConstraintViolation per bean-validation error.
+     */
+    private static <T extends ICdmBase> EntityValidation createEntityValidation(T validatedEntity,
+            Set<ConstraintViolation<T>> errors, CRUDEventType crudEventType) {
+        EntityValidation entityValidation = EntityValidation.newInstance(validatedEntity, crudEventType);
+        Set<EntityConstraintViolation> errorEntities = new HashSet<EntityConstraintViolation>(errors.size());
+        for (ConstraintViolation<T> error : errors) {
+            EntityConstraintViolation errorEntity = EntityConstraintViolation.newInstance(validatedEntity, error);
+            errorEntities.add(errorEntity);
+        }
+        entityValidation.setEntityConstraintViolations(errorEntities);
+        return entityValidation;
+    }
+
+    /**
+     * Deletes all error records belonging to the given validation result,
+     * optionally restricted to the given validation groups (null or empty
+     * array means: delete regardless of group).
+     */
+    private static void deletedErrorRecords(Connection conn, int validationResultId, Class<?>[] validationGroups)
+            throws SQLException {
+        StringBuilder sql = new StringBuilder(127);
+        sql.append("DELETE FROM entityconstraintviolation WHERE entityvalidation_id = ?");
+        if (validationGroups != null && validationGroups.length != 0) {
+            sql.append(" AND (");
+            for (int i = 0; i < validationGroups.length; ++i) {
+                if (i != 0) {
+                    sql.append(" OR ");
+                }
+                // one placeholder per validation group, OR-ed together
+                sql.append("validationgroup = ?");
+            }
+            sql.append(")");
+        }
+        PreparedStatement stmt = null;
+        try {
+            stmt = conn.prepareStatement(sql.toString());
+            stmt.setInt(1, validationResultId);
+            if (validationGroups != null && validationGroups.length != 0) {
+                for (int i = 0; i < validationGroups.length; ++i) {
+                    // group parameters start at index 2, after the id parameter
+                    stmt.setString(i + 2, validationGroups[i].getName());
+                }
+            }
+            stmt.executeUpdate();
+        } finally {
+            JdbcDaoUtils.close(stmt);
+        }
+    }
+
+    /**
+     * Deletes the error records from a previous validation that did not recur
+     * in the current validation (i.e. have become obsolete). A no-op when the
+     * previous validation holds no errors.
+     */
+    private static void deleteObsoleteErrors(Connection conn, EntityValidation previousValidation) throws SQLException {
+        Set<EntityConstraintViolation> obsoleteErrors = previousValidation.getEntityConstraintViolations();
+        if (obsoleteErrors == null || obsoleteErrors.isEmpty()) {
+            return;
+        }
+        String sql = "DELETE FROM entityconstraintviolation WHERE id = ?";
+        PreparedStatement stmt = null;
+        try {
+            // sql is already a String; the redundant toString() call was removed
+            stmt = conn.prepareStatement(sql);
+            for (EntityConstraintViolation error : obsoleteErrors) {
+                stmt.setInt(1, error.getId());
+                stmt.executeUpdate();
+            }
+        } finally {
+            JdbcDaoUtils.close(stmt);
+        }
+    }
+
+    // Save EntityValidation entity to database. As a side effect
+    // the database id assigned to the entity will be set on the
+    // EntityValidation instance
+    private static void saveEntityValidationRecord(Connection conn, EntityValidation newValidation) throws SQLException {
+        PreparedStatement stmt = null;
+        try {
+            stmt = conn.prepareStatement(SQL_INSERT_VALIDATION_RESULT);
+            if (newValidation.getId() <= 0) {
+                // NOTE(review): MAX(id) + 10 id generation is not safe under
+                // concurrent inserts — TODO confirm the single-writer assumption
+                int id = 10 + JdbcDaoUtils.fetchInt(conn, "SELECT MAX(id) FROM entityvalidation");
+                newValidation.setId(id);
+            }
+            stmt.setInt(vr_id, newValidation.getId());
+            stmt.setDate(vr_created, new Date(newValidation.getCreated().getMillis()));
+            stmt.setString(vr_uuid, newValidation.getUuid().toString());
+            stmt.setString(vr_crudeventtype, newValidation.getCrudEventType().toString());
+            stmt.setString(vr_validatedentityclass, newValidation.getValidatedEntityClass());
+            stmt.setInt(vr_validatedentityid, newValidation.getValidatedEntityId());
+            stmt.setString(vr_validatedentityuuid, newValidation.getValidatedEntityUuid().toString());
+            stmt.setString(vr_userfriendlydescription, newValidation.getUserFriendlyDescription());
+            stmt.setString(vr_userfriendlytypename, newValidation.getUserFriendlyTypeName());
+            // first save: validationcount starts at 1, updated mirrors created
+            stmt.setInt(vr_validationcount, 1);
+            stmt.setDate(vr_updated, new Date(newValidation.getCreated().getMillis()));
+            stmt.setString(vr_status, EntityValidationStatus.IN_PROGRESS.toString());
+            if (newValidation.getCreatedBy() != null) {
+                stmt.setInt(vr_createdby_id, newValidation.getCreatedBy().getId());
+            } else {
+                stmt.setNull(vr_createdby_id, Types.INTEGER);
+            }
+            stmt.executeUpdate();
+        } finally {
+            JdbcDaoUtils.close(stmt);
+        }
+    }
+
+    /**
+     * Updates an existing entityvalidation record: bumps validationcount,
+     * refreshes crudeventtype and the updated timestamp, and resets status to
+     * IN_PROGRESS for the duration of the current validation run.
+     */
+    private static void increaseValidationCounter(Connection conn, EntityValidation entityValidation)
+            throws SQLException {
+        String sql = "UPDATE entityvalidation SET crudeventtype=?, validationcount = validationcount + 1, "
+                + "updated = ?, status = ? WHERE id=?";
+        PreparedStatement stmt = null;
+        try {
+            stmt = conn.prepareStatement(sql);
+            if (entityValidation.getCrudEventType() == null) {
+                stmt.setString(1, null);
+            } else {
+                stmt.setString(1, entityValidation.getCrudEventType().toString());
+            }
+            stmt.setDate(2, new Date(new java.util.Date().getTime()));
+            stmt.setString(3, EntityValidationStatus.IN_PROGRESS.toString());
+            stmt.setInt(4, entityValidation.getId());
+            stmt.executeUpdate();
+        } finally {
+            JdbcDaoUtils.close(stmt);
+        }
+    }
+
+    /**
+     * Inserts one entityconstraintviolation record per error attached to the
+     * given validation. A no-op when the validation has no errors. The unused
+     * type parameter {@code <T extends ICdmBase>} was removed; no call site
+     * supplied a type witness, so this is source-compatible.
+     */
+    private static void saveErrorRecords(Connection conn, EntityValidation entityValidation)
+            throws SQLException {
+        Set<EntityConstraintViolation> errors = entityValidation.getEntityConstraintViolations();
+        if (errors == null || errors.isEmpty()) {
+            return;
+        }
+        PreparedStatement stmt = null;
+        try {
+            stmt = conn.prepareStatement(SQL_INSERT_CONSTRAINT_VIOLATION);
+            for (EntityConstraintViolation error : errors) {
+                if (error.getId() <= 0) {
+                    // NOTE(review): MAX(id) + 10 id generation is not safe under
+                    // concurrent inserts — TODO confirm the single-writer assumption
+                    int id = 10 + JdbcDaoUtils.fetchInt(conn, "SELECT MAX(id) FROM entityconstraintviolation");
+                    error.setId(id);
+                }
+                stmt.setInt(cv_id, error.getId());
+                stmt.setDate(cv_created, new Date(error.getCreated().getMillis()));
+                stmt.setString(cv_uuid, error.getUuid().toString());
+                stmt.setString(cv_invalidvalue, error.getInvalidValue());
+                stmt.setString(cv_message, error.getMessage());
+                stmt.setString(cv_propertypath, error.getPropertyPath());
+                stmt.setString(cv_userfriendlyfieldname, error.getUserFriendlyFieldName());
+                stmt.setString(cv_severity, error.getSeverity().toString());
+                stmt.setString(cv_validator, error.getValidator());
+                stmt.setString(cv_validationgroup, error.getValidationGroup());
+                if (error.getCreatedBy() != null) {
+                    stmt.setInt(cv_createdby_id, error.getCreatedBy().getId());
+                } else {
+                    stmt.setNull(cv_createdby_id, Types.INTEGER);
+                }
+                stmt.setInt(cv_entityvalidation_id, entityValidation.getId());
+                stmt.executeUpdate();
+            }
+        } finally {
+            JdbcDaoUtils.close(stmt);
+        }
+    }
+
+    // Called by unit test
+    /**
+     * Retrieves the validation result (including its error records) for the
+     * given entity, or null when none exists or retrieval fails.
+     */
+    EntityValidation getEntityValidation(String validatedEntityClass, int validatedEntityId) {
+        Connection conn = null;
+        try {
+            conn = datasource.getConnection();
+            JdbcDaoUtils.startTransaction(conn);
+            EntityValidation result = getEntityValidation(conn, validatedEntityClass, validatedEntityId);
+            conn.commit();
+            return result;
+        } catch (Throwable t) {
+            logger.error("Error while retrieving validation result", t);
+            JdbcDaoUtils.rollback(conn);
+            return null;
+        } finally {
+            // The original never closed the connection, leaking one per call
+            JdbcDaoUtils.close(conn);
+        }
+    }
+
+    /**
+     * Loads the validation record for the given entity together with all of
+     * its error records; returns null when the entity was never validated.
+     */
+    private static EntityValidation getEntityValidation(Connection conn, String validatedEntityClass,
+            int validatedEntityId) throws SQLException {
+        EntityValidation entityValidation = getEntityValidationRecord(conn, validatedEntityClass, validatedEntityId);
+        if (entityValidation != null) {
+            entityValidation.setEntityConstraintViolations(getErrorRecords(conn, entityValidation.getId()));
+        }
+        return entityValidation;
+    }
+
+    /**
+     * Deletes the entityvalidation record with the given id.
+     */
+    private static void deleteValidationResultRecord(Connection conn, int validationResultId) throws SQLException {
+        String sql = "DELETE FROM entityvalidation WHERE id = ?";
+        PreparedStatement stmt = null;
+        try {
+            stmt = conn.prepareStatement(sql);
+            stmt.setInt(1, validationResultId);
+            stmt.executeUpdate();
+        } finally {
+            // The original never closed the statement, leaking it on every call
+            JdbcDaoUtils.close(stmt);
+        }
+    }
+
+    /**
+     * Persists the given status for the validation record in its own
+     * mini-transaction on the caller's connection. Logs and returns without
+     * throwing when the record cannot be identified (null connection or
+     * validation, or a transient id), since status updates are best-effort.
+     */
+    private static void setStatus(Connection conn, EntityValidation entityValidation, EntityValidationStatus status) {
+        if (conn == null || entityValidation == null || entityValidation.getId() <= 0) {
+            logger.warn("Failed to save entity validation status to database");
+            return;
+        }
+        String sql = "UPDATE entityvalidation SET status = ? WHERE id = ?";
+        PreparedStatement stmt = null;
+        try {
+            JdbcDaoUtils.startTransaction(conn);
+            stmt = conn.prepareStatement(sql);
+            stmt.setString(1, status.toString());
+            stmt.setInt(2, entityValidation.getId());
+            stmt.executeUpdate();
+            conn.commit();
+        } catch (Throwable t) {
+            logger.error("Failed to set validation status", t);
+        } finally {
+            JdbcDaoUtils.close(stmt);
+        }
+    }
+
+    /**
+     * Fetches the bare entityvalidation record (without its error records) for
+     * the given entity, or null when no record exists. Nullable columns are
+     * only copied onto the result when the database value was non-null.
+     */
+    private static EntityValidation getEntityValidationRecord(Connection conn,
+            String validatedEntityClass, int validatedEntityId) throws SQLException {
+        // Removed leftover debug code that printed the table's row count to
+        // System.out and leaked both its Statement and its ResultSet.
+        String sql = "SELECT * FROM entityvalidation WHERE validatedentityclass=? AND validatedentityid=?";
+        EntityValidation result = null;
+        PreparedStatement stmt = null;
+        try {
+            stmt = conn.prepareStatement(sql);
+            stmt.setString(1, validatedEntityClass);
+            stmt.setInt(2, validatedEntityId);
+            ResultSet rs = stmt.executeQuery();
+            if (rs.next()) {
+                result = EntityValidation.newInstance();
+                result.setId(rs.getInt("id"));
+                Date d = rs.getDate("created");
+                if (!rs.wasNull()) {
+                    result.setCreated(new DateTime(d.getTime()));
+                }
+                String s = rs.getString("uuid");
+                if (!rs.wasNull()) {
+                    result.setUuid(UUID.fromString(rs.getString("uuid")));
+                }
+                s = rs.getString("crudeventtype");
+                if (!rs.wasNull()) {
+                    result.setCrudEventType(CRUDEventType.valueOf(s));
+                }
+                result.setValidatedEntityClass(rs.getString("validatedentityclass"));
+                result.setValidatedEntityId(rs.getInt("validatedentityid"));
+                s = rs.getString("validatedentityuuid");
+                if (!rs.wasNull()) {
+                    result.setValidatedEntityUuid(UUID.fromString(s));
+                }
+                result.setUserFriendlyDescription(rs.getString("userfriendlydescription"));
+                result.setUserFriendlyTypeName(rs.getString("userfriendlytypename"));
+            }
+            rs.close();
+            return result;
+        } finally {
+            JdbcDaoUtils.close(stmt);
+        }
+    }
+
+    /**
+     * Returns the id of the entityvalidation record for the given entity,
+     * or -1 when no record exists.
+     */
+    private static int getValidationResultId(Connection conn, String validatedEntityClass, int validatedEntityId)
+            throws SQLException {
+        String sql = "SELECT id FROM entityvalidation WHERE validatedentityclass = ? AND validatedentityid = ?";
+        PreparedStatement stmt = null;
+        int result = -1;
+        try {
+            stmt = conn.prepareStatement(sql);
+            stmt.setString(1, validatedEntityClass);
+            stmt.setInt(2, validatedEntityId);
+            ResultSet rs = stmt.executeQuery();
+            if (rs.next()) {
+                result = rs.getInt(1);
+            }
+            rs.close();
+        } finally {
+            JdbcDaoUtils.close(stmt);
+        }
+        return result;
+    }
+
+    /**
+     * Loads all error records for a validation result, regardless of
+     * validation group.
+     */
+    private static Set<EntityConstraintViolation> getErrorRecords(Connection conn, int entityValidationId)
+            throws SQLException {
+        return getErrorRecordsForValidationGroup(conn, entityValidationId, null);
+    }
+
+    /**
+     * Loads the error records for a validation result, optionally restricted
+     * to the given validation groups (null or empty array means: all groups).
+     */
+    private static Set<EntityConstraintViolation> getErrorRecordsForValidationGroup(Connection conn,
+            int entityValidationId, Class<?>[] validationGroups) throws SQLException {
+        StringBuilder sql = new StringBuilder("SELECT * FROM entityconstraintviolation WHERE entityvalidation_id=?");
+        if (validationGroups != null && validationGroups.length != 0) {
+            sql.append(" AND (");
+            for (int i = 0; i < validationGroups.length; ++i) {
+                if (i != 0) {
+                    sql.append(" OR ");
+                }
+                // one placeholder per validation group, OR-ed together
+                sql.append("validationgroup = ?");
+            }
+            sql.append(")");
+        }
+        PreparedStatement stmt = null;
+        Set<EntityConstraintViolation> errors = new HashSet<EntityConstraintViolation>();
+        try {
+            stmt = conn.prepareStatement(sql.toString());
+            stmt.setInt(1, entityValidationId);
+            if (validationGroups != null && validationGroups.length != 0) {
+                for (int i = 0; i < validationGroups.length; ++i) {
+                    // group parameters start at index 2, after the id parameter
+                    stmt.setString(i + 2, validationGroups[i].getName());
+                }
+            }
+            ResultSet rs = stmt.executeQuery();
+            while (rs.next()) {
+                EntityConstraintViolation error = EntityConstraintViolation.newInstance();
+                error.setId(rs.getInt("id"));
+                error.setCreated(new DateTime(rs.getDate("created").getTime()));
+                error.setUuid(UUID.fromString(rs.getString("uuid")));
+                error.setInvalidValue(rs.getString("invalidvalue"));
+                error.setMessage(rs.getString("message"));
+                error.setPropertyPath(rs.getString("propertypath"));
+                error.setUserFriendlyFieldName(rs.getString("userfriendlyfieldname"));
+                error.setSeverity(Severity.forName(rs.getString("severity")));
+                error.setValidator(rs.getString("validator"));
+                error.setValidationGroup(rs.getString("validationgroup"));
+                errors.add(error);
+            }
+            rs.close();
+        } finally {
+            JdbcDaoUtils.close(stmt);
+        }
+        return errors;
+    }
+
+    /**
+     * Removes from the previous validation's error set all errors whose
+     * validation group is NOT among the groups applied by the current
+     * validation, so the caller will not treat those errors as obsolete (and
+     * hence will not delete them).
+     */
+    private static void dontDeleteErrorsInOtherValidationGroups(EntityValidation previousValidation,
+            Class<?>[] validationGroups) {
+        if (validationGroups == null || validationGroups.length == 0) {
+            // The original NPE'd on a null array. Consistent with
+            // deletedErrorRecords(), null/empty means "all groups": no error
+            // belongs to an "other" group, so nothing is set apart.
+            return;
+        }
+        Set<String> classNames = new HashSet<String>(validationGroups.length);
+        for (Class<?> c : validationGroups) {
+            classNames.add(c.getName());
+        }
+        Iterator<EntityConstraintViolation> iterator = previousValidation.getEntityConstraintViolations().iterator();
+        while (iterator.hasNext()) {
+            if (!classNames.contains(iterator.next().getValidationGroup())) {
+                iterator.remove();
+            }
+        }
+    }
+
+}
*/
public class MergeResult<T extends ICdmBase> implements Serializable {
+ private static final long serialVersionUID = 4886558003386941487L;
+
private T mergedEntity;
private Set<T> newEntities;
-/**\r
-* Copyright (C) 2007 EDIT\r
-* European Distributed Institute of Taxonomy\r
-* http://www.e-taxonomy.eu\r
-*\r
-* The contents of this file are subject to the Mozilla Public License Version 1.1\r
-* See LICENSE.TXT at the top of this package for the full license terms.\r
-*/\r
-\r
-package eu.etaxonomy.cdm.persistence.hibernate;\r
-\r
-import java.io.Serializable;\r
-\r
-import org.apache.log4j.Logger;\r
-import org.hibernate.EmptyInterceptor;\r
-import org.hibernate.Transaction;\r
-import org.hibernate.type.Type;\r
-import org.springframework.stereotype.Component;\r
-\r
-import eu.etaxonomy.cdm.model.common.CdmBase;\r
-\r
-/**\r
- * @author a.mueller\r
- *\r
- */\r
-@Component\r
-public class CdmHibernateInterceptor extends EmptyInterceptor {\r
- private static final long serialVersionUID = 2536017420460052854L;\r
- private static final Logger logger = Logger.getLogger(CdmHibernateInterceptor.class);\r
-\r
- //FIXME problem is circular dependency (see VocabularyStoreImpl.staticInitialized\r
-// @Autowired\r
-// VocabularyStoreImpl vocabularyStore;\r
-\r
- private int updates;\r
- private int creates;\r
- private int loads;\r
-\r
- public void onDelete(Object entity,\r
- Serializable id,\r
- Object[] state,\r
- String[] propertyNames,\r
- Type[] types) {\r
- // do nothing\r
- }\r
-\r
- public boolean onFlushDirty(Object entity,\r
- Serializable id,\r
- Object[] currentState,\r
- Object[] previousState,\r
- String[] propertyNames,\r
- Type[] types) {\r
- System.err.println("onFlushDirty...");\r
- boolean result = false;\r
- if ( entity instanceof CdmBase ) {\r
- updates++;\r
- //result &= checkTransientDefinedTerms(currentState);\r
- }\r
- return result;\r
- }\r
-\r
- public boolean onLoad(Object entity,\r
- Serializable id,\r
- Object[] state,\r
- String[] propertyNames,\r
- Type[] types) {\r
- if ( entity instanceof CdmBase ) {\r
- logger.warn("id = " +id);\r
- loads++;\r
- }\r
- return false;\r
- }\r
-\r
- public boolean onSave(Object entity,\r
- Serializable id,\r
- Object[] state,\r
- String[] propertyNames,\r
- Type[] types) {\r
- System.err.println("onSave...");\r
- boolean result = false;\r
- if ( entity instanceof CdmBase ) {\r
- creates++;\r
- //result &= checkTransientDefinedTerms(state);\r
- }\r
- return result;\r
- }\r
-\r
-\r
- private boolean checkTransientDefinedTerms(Object[] state){\r
- boolean result = false;\r
-// if (VocabularyStoreImpl.isInitialized()){\r
-// //logger.debug("Save: " + entity);\r
-// int i = -1;\r
-// for (Object singleState : state){\r
-// i++;\r
-// if (singleState instanceof DefinedTermBase){\r
-// DefinedTermBase term = ((DefinedTermBase)singleState);\r
-// if (term.getId() != 0){\r
-// continue;\r
-// }else{\r
-// //logger.debug(" " + singleState.getClass());\r
-// UUID uuid = term.getUuid();\r
-// DefinedTermBase storedTermBase = VocabularyStoreImpl.getCurrentVocabularyStore().getTermByUuid(uuid);\r
-// if (storedTermBase == null){\r
-// logger.warn("DefinedTermBase with uuid "+ uuid +" could not be found in vocabulary store. Term stays transient.");\r
-// }else if (uuid.equals(storedTermBase.getUuid())){\r
-// logger.debug("Changed transient term");\r
-// state[i] = storedTermBase;\r
-// result = true;\r
-// }else{\r
-// throw new IllegalStateException("UUID is not equal.");\r
-// }\r
-// }\r
-//\r
-// }\r
-// }\r
-// }else{ //not initialized\r
-//\r
-// }\r
- return result;\r
- }\r
-\r
- public void afterTransactionCompletion(Transaction tx) {\r
- if ( tx.wasCommitted() ) {\r
- logger.debug("Creations: " + creates + ", Updates: " + updates + ", Loads: " + loads);\r
- }\r
- updates=0;\r
- creates=0;\r
- loads=0;\r
- }\r
-\r
-}\r
-\r
+/**
+* Copyright (C) 2007 EDIT
+* European Distributed Institute of Taxonomy
+* http://www.e-taxonomy.eu
+*
+* The contents of this file are subject to the Mozilla Public License Version 1.1
+* See LICENSE.TXT at the top of this package for the full license terms.
+*/
+
+package eu.etaxonomy.cdm.persistence.hibernate;
+
+import java.io.Serializable;
+
+import org.apache.log4j.Logger;
+import org.hibernate.EmptyInterceptor;
+import org.hibernate.Transaction;
+import org.hibernate.resource.transaction.spi.TransactionStatus;
+import org.hibernate.type.Type;
+import org.springframework.stereotype.Component;
+
+import eu.etaxonomy.cdm.model.common.CdmBase;
+
+/**
+ * Hibernate interceptor that counts entity creations, updates and loads for
+ * {@link CdmBase} instances and logs the totals after each committed
+ * transaction. All callbacks return false/void, so no entity state is
+ * modified.
+ *
+ * @author a.mueller
+ */
+@Component
+public class CdmHibernateInterceptor extends EmptyInterceptor {
+    private static final long serialVersionUID = 2536017420460052854L;
+    private static final Logger logger = Logger.getLogger(CdmHibernateInterceptor.class);
+
+    //FIXME problem is circular dependency (see VocabularyStoreImpl.staticInitialized
+//    @Autowired
+//    VocabularyStoreImpl vocabularyStore;
+
+    // per-transaction counters, reset in afterTransactionCompletion()
+    private int updates;
+    private int creates;
+    private int loads;
+
+    /** No-op: deletions are intentionally not tracked. */
+    @Override
+    public void onDelete(Object entity,
+            Serializable id,
+            Object[] state,
+            String[] propertyNames,
+            Type[] types) {
+        // do nothing
+    }
+
+    /** Counts flush-dirty events for CdmBase entities; never alters state. */
+    @Override
+    public boolean onFlushDirty(Object entity,
+            Serializable id,
+            Object[] currentState,
+            Object[] previousState,
+            String[] propertyNames,
+            Type[] types) {
+        if (logger.isDebugEnabled()) {
+            logger.debug("onFlushDirty...");
+        }
+        boolean result = false;
+        if ( entity instanceof CdmBase ) {
+            updates++;
+            //result &= checkTransientDefinedTerms(currentState);
+        }
+        return result;
+    }
+
+    /** Counts loads of CdmBase entities; never alters state. */
+    @Override
+    public boolean onLoad(Object entity,
+            Serializable id,
+            Object[] state,
+            String[] propertyNames,
+            Type[] types) {
+        if ( entity instanceof CdmBase ) {
+            if (logger.isDebugEnabled()) {
+                logger.debug("id = " +id);
+            }
+            loads++;
+        }
+        return false;
+    }
+
+    /** Counts saves of CdmBase entities; never alters state. */
+    @Override
+    public boolean onSave(Object entity,
+            Serializable id,
+            Object[] state,
+            String[] propertyNames,
+            Type[] types) {
+        if (logger.isDebugEnabled()) {
+            logger.debug("onSave...");
+        }
+        boolean result = false;
+        if ( entity instanceof CdmBase ) {
+            creates++;
+            //result &= checkTransientDefinedTerms(state);
+        }
+        return result;
+    }
+
+
+    // Currently unused (body is fully commented out): was meant to replace
+    // transient DefinedTermBase instances in the entity state with their
+    // persisted counterparts from the vocabulary store. Kept for reference
+    // until the circular-dependency FIXME above is resolved.
+    private boolean checkTransientDefinedTerms(Object[] state){
+        boolean result = false;
+//        if (VocabularyStoreImpl.isInitialized()){
+//            //logger.debug("Save: " + entity);
+//            int i = -1;
+//            for (Object singleState : state){
+//                i++;
+//                if (singleState instanceof DefinedTermBase){
+//                    DefinedTermBase term = ((DefinedTermBase)singleState);
+//                    if (term.getId() != 0){
+//                        continue;
+//                    }else{
+//                        //logger.debug("   " + singleState.getClass());
+//                        UUID uuid = term.getUuid();
+//                        DefinedTermBase storedTermBase = VocabularyStoreImpl.getCurrentVocabularyStore().getTermByUuid(uuid);
+//                        if (storedTermBase == null){
+//                            logger.warn("DefinedTermBase with uuid "+ uuid +" could not be found in vocabulary store. Term stays transient.");
+//                        }else if (uuid.equals(storedTermBase.getUuid())){
+//                            logger.debug("Changed transient term");
+//                            state[i] = storedTermBase;
+//                            result = true;
+//                        }else{
+//                            throw new IllegalStateException("UUID is not equal.");
+//                        }
+//                    }
+//
+//                }
+//            }
+//        }else{ //not initialized
+//
+//        }
+        return result;
+    }
+
+    /** Logs the counters after a committed transaction, then resets them. */
+    @Override
+    public void afterTransactionCompletion(Transaction tx) {
+        if ( tx.getStatus() == TransactionStatus.COMMITTED ) {
+            logger.debug("Creations: " + creates + ", Updates: " + updates + ", Loads: " + loads);
+        }
+        updates=0;
+        creates=0;
+        loads=0;
+    }
+
+}
+
package eu.etaxonomy.cdm.persistence.hibernate;
import org.apache.log4j.Logger;
-import org.hibernate.cfg.Configuration;
+import org.hibernate.boot.Metadata;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.event.service.spi.EventListenerRegistry;
import org.hibernate.event.spi.EventType;
import org.hibernate.integrator.spi.Integrator;
-import org.hibernate.metamodel.source.MetadataImplementor;
import org.hibernate.service.spi.SessionFactoryServiceRegistry;
/**
public class CdmListenerIntegrator implements Integrator {
private static final Logger logger = Logger.getLogger(CdmListenerIntegrator.class);
-
- /*
- * (non-Javadoc)
- *
- * @see org.hibernate.integrator.spi.Integrator#integrate(org.hibernate.cfg.Configuration,
- * org.hibernate.engine.spi.SessionFactoryImplementor,
- * org.hibernate.service.spi.SessionFactoryServiceRegistry)
- */
@Override
- public void integrate(Configuration configuration, SessionFactoryImplementor sessionFactory, SessionFactoryServiceRegistry serviceRegistry){
- if (logger.isInfoEnabled()) {
+ public void integrate(Metadata metadata,
+ SessionFactoryImplementor sessionFactory,
+ SessionFactoryServiceRegistry serviceRegistry)
+ {
+ if (logger.isInfoEnabled()) {
logger.info("Registering event listeners");
}
eventRegistry.appendListeners(EventType.PRE_UPDATE, new CdmPreDataChangeListener());
}
-
- /*
- * (non-Javadoc)
- *
- * @see org.hibernate.integrator.spi.Integrator#integrate(org.hibernate.metamodel.source.
- * MetadataImplementor, org.hibernate.engine.spi.SessionFactoryImplementor,
- * org.hibernate.service.spi.SessionFactoryServiceRegistry)
- */
- @Override
- public void integrate(MetadataImplementor metadata, SessionFactoryImplementor sessionFactory, SessionFactoryServiceRegistry serviceRegistry){
- //nothing to do for now
- logger.warn("Metadata integrate not yet implemented");
- }
-
-
- /*
- * (non-Javadoc)
- *
- * @see org.hibernate.integrator.spi.Integrator#disintegrate(org.hibernate.engine.spi.
- * SessionFactoryImplementor, org.hibernate.service.spi.SessionFactoryServiceRegistry)
- */
@Override
- public void disintegrate(SessionFactoryImplementor sessionFactory, SessionFactoryServiceRegistry serviceRegistry)
+ public void disintegrate(SessionFactoryImplementor sessionFactory,
+ SessionFactoryServiceRegistry serviceRegistry)
{
//nothing to do for now
- logger.warn("Disintegrate not yet implemented");
+ logger.warn("Disintegrate ListenerIntegrator not yet implemented");
}
}
import org.apache.log4j.Logger;
import org.hibernate.MappingException;
-import org.hibernate.dialect.Dialect;
+import org.hibernate.service.ServiceRegistry;
import org.hibernate.type.Type;
/**
*</pre>
*
* If you set the optimizer to "none", hibernate will always query the database for each new id.
- * You must tell spring to intantiate the ... before the session factory:
+ * You must tell spring to instantiate the ... before the session factory:
*
* <pre>
- * <bean id="sessionFactory" class="org.springframework.orm.hibernate4.LocalSessionFactoryBean" depends-on="tableGeneratorGlobalOverride">
+ * <bean id="sessionFactory" class="org.springframework.orm.hibernate5.LocalSessionFactoryBean" depends-on="tableGeneratorGlobalOverride">
* ...
* </pre>
*
/**
* {@inheritDoc}
*/
- public void configure(Type type, Properties params, Dialect dialect) throws MappingException {
+ @Override
+ public void configure(Type type, Properties params, ServiceRegistry serviceRegistry) throws MappingException {
Properties overrideProperies = TableGeneratorGlobalOverride.getProperties();
if(overrideProperies != null) {
params.putAll(overrideProperies);
}
logger.debug("overrideProperies:" + (overrideProperies != null ? overrideProperies :"NULL"));
- super.configure(type, params, dialect);
+ super.configure(type, params, serviceRegistry);
}
// /**
-/**\r
-* Copyright (C) 2007 EDIT\r
-* European Distributed Institute of Taxonomy \r
-* http://www.e-taxonomy.eu\r
-* \r
-* The contents of this file are subject to the Mozilla Public License Version 1.1\r
-* See LICENSE.TXT at the top of this package for the full license terms.\r
-*/\r
-\r
-package org.hibernate.dialect;\r
-\r
-import java.sql.Types;\r
-\r
-import org.hibernate.dialect.H2Dialect;\r
-\r
-/**\r
- * Extends H2Dialect and registers Types.BIT for boolean and Types.FLOAT as double.\r
- * This is a work around for a known bug in the H2Dialect\r
- * \r
- * @see http://opensource.atlassian.com/projects/hibernate/browse/HHH-1598\r
- * @author a.mueller\r
- */\r
-public class H2CorrectedDialect extends H2Dialect {\r
-\r
- public H2CorrectedDialect() {\r
- super();\r
- registerColumnType(Types.BIT, "boolean");\r
- registerColumnType(Types.FLOAT, "double");\r
- \r
- \r
- }\r
-\r
- \r
+/**
+* Copyright (C) 2007 EDIT
+* European Distributed Institute of Taxonomy
+* http://www.e-taxonomy.eu
+*
+* The contents of this file are subject to the Mozilla Public License Version 1.1
+* See LICENSE.TXT at the top of this package for the full license terms.
+*/
+
+package org.hibernate.dialect;
+
+import java.sql.Types;
+
+/**
+ * Extends H2Dialect and registers Types.FLOAT as double.
+ * This is a work around for a known bug in the H2Dialect.
+ * (The former Types.BIT-to-boolean registration was dropped in this version.)
+ *
+ * @see <a href="http://opensource.atlassian.com/projects/hibernate/browse/HHH-1598">HHH-1598</a>
+ * @author a.mueller
+ */
+public class H2CorrectedDialect extends H2Dialect {
+
+    public H2CorrectedDialect() {
+        super();
+        // NOTE(review): FLOAT->double mapping kept as workaround — confirm it
+        // is still required with the current H2/Hibernate versions
+        registerColumnType(Types.FLOAT, "double"); //do we really want this
+
+
+    }
+}
\ No newline at end of file
http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop-4.0.xsd
">
- <bean id="transactionManager" class="org.springframework.orm.hibernate4.HibernateTransactionManager">
+ <bean id="transactionManager" class="org.springframework.orm.hibernate5.HibernateTransactionManager">
<property name="sessionFactory" ref="sessionFactory" />
</bean>
- <bean id="sessionFactory" class="org.springframework.orm.hibernate4.LocalSessionFactoryBean" depends-on="tableGeneratorGlobalOverride">
- <property name="namingStrategy">
- <bean class="org.hibernate.cfg.DefaultComponentSafeNamingStrategy" />
- </property>
+ <bean id="sessionFactory" class="org.springframework.orm.hibernate5.LocalSessionFactoryBean" depends-on="tableGeneratorGlobalOverride">
+ <property name="implicitNamingStrategy">
+ <bean class="org.hibernate.boot.model.naming.ImplicitNamingStrategyComponentPathImpl" />
+ </property>
+
<!-- <property name="entityInterceptor">
<bean class="eu.etaxonomy.cdm.persistence.hibernate.CdmHibernateInterceptor" />
-->
<property name="configLocation" value="classpath:eu/etaxonomy/cdm/hibernate.cfg.xml"/>
-<!-- Looks like this is not needed anymore for hibernate 4 -->
-<!-- <property name="configurationClass" value="org.hibernate.cfg.AnnotationConfiguration"/> -->
-
<!--
If dataSource is set, this will override corresponding settings in Hibernate properties.
If this is set, the Hibernate settings should not define a connection provider to
http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop-4.0.xsd
">
- <bean id="transactionManager" class="org.springframework.orm.hibernate4.HibernateTransactionManager">
+ <bean id="transactionManager" class="org.springframework.orm.hibernate5.HibernateTransactionManager">
<property name="sessionFactory" ref="sessionFactory" />
</bean>
- <bean id="sessionFactory" class="org.springframework.orm.hibernate4.LocalSessionFactoryBean" depends-on="tableGeneratorGlobalOverride">
- <property name="namingStrategy">
- <bean class="org.hibernate.cfg.DefaultComponentSafeNamingStrategy" />
+ <bean id="sessionFactory" class="org.springframework.orm.hibernate5.LocalSessionFactoryBean" depends-on="tableGeneratorGlobalOverride">
+ <property name="implicitNamingStrategy">
+ <bean class="org.hibernate.boot.model.naming.ImplicitNamingStrategyComponentPathImpl" />
</property>
<property name="entityInterceptor" ref="securityHibernateInterceptor" />
<property name="configLocation" value="classpath:eu/etaxonomy/cdm/hibernate.cfg.xml"/>
- <!-- Looks like this is not needed anymore for hibernate 4 -->
-<!-- <property name="configurationClass" value="org.hibernate.cfg.AnnotationConfiguration"/> -->
-
<!--
If dataSource is set, this will override corresponding settings in Hibernate properties.
If this is set, the Hibernate settings should not define a connection provider to
import static org.junit.Assert.assertNull;
import java.io.FileNotFoundException;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.UUID;
import org.junit.Ignore;
import org.junit.Test;
-import org.unitils.dbunit.annotation.DataSet;
import org.unitils.spring.annotation.SpringApplicationContext;
import org.unitils.spring.annotation.SpringBeanByType;
-import eu.etaxonomy.cdm.model.common.DefinedTermBase;
import eu.etaxonomy.cdm.model.common.Language;
import eu.etaxonomy.cdm.model.common.TermVocabulary;
-import eu.etaxonomy.cdm.model.common.VocabularyEnum;
import eu.etaxonomy.cdm.model.name.Rank;
import eu.etaxonomy.cdm.test.integration.CdmIntegrationTest;
assertNotNull("TermInitializer should exist",persistentTermInitializer);
}
-
- @Test
- @DataSet("TermsDataSet.xml")
- public void testFirstPass() {
- Map<UUID, DefinedTermBase> persistedTerms = new HashMap<UUID, DefinedTermBase>();
- persistentTermInitializer.firstPass(VocabularyEnum.Rank, persistedTerms);
- }
+// As firstPass is no longer used, this test is no longer needed.
+// @Test
+// @DataSet("TermsDataSet.xml")
+// public void testFirstPass() {
+// Map<UUID, DefinedTermBase> persistedTerms = new HashMap<UUID, DefinedTermBase>();
+// persistentTermInitializer.firstPass(VocabularyEnum.Rank, persistedTerms);
+// }
/**
* Test method for {@link eu.etaxonomy.cdm.model.common.DefaultTermInitializer#initialize()}.
}
-
- /* (non-Javadoc)
- * @see eu.etaxonomy.cdm.test.integration.CdmIntegrationTest#createTestData()
- */
@Override
- public void createTestDataSet() throws FileNotFoundException {
- // TODO Auto-generated method stub
-
- }
-
-
-
+ public void createTestDataSet() throws FileNotFoundException {}
}
-/**\r
-* Copyright (C) 2009 EDIT\r
-* European Distributed Institute of Taxonomy\r
-* http://www.e-taxonomy.eu\r
-*\r
-* The contents of this file are subject to the Mozilla Public License Version 1.1\r
-* See LICENSE.TXT at the top of this package for the full license terms.\r
-*/\r
-\r
-package eu.etaxonomy.cdm.database;\r
-\r
-import java.sql.SQLException;\r
-import java.util.HashMap;\r
-import java.util.UUID;\r
-\r
-import javax.annotation.PostConstruct;\r
-import javax.sql.DataSource;\r
-\r
-import org.apache.log4j.Logger;\r
-import org.dbunit.database.DatabaseConfig;\r
-import org.dbunit.database.DatabaseConnection;\r
-import org.dbunit.database.IDatabaseConnection;\r
-import org.dbunit.dataset.IDataSet;\r
-import org.dbunit.dataset.xml.FlatXmlDataSetBuilder;\r
-import org.dbunit.ext.h2.H2DataTypeFactory;\r
-import org.dbunit.operation.DatabaseOperation;\r
-import org.springframework.beans.factory.annotation.Autowired;\r
-import org.springframework.core.io.Resource;\r
-import org.springframework.transaction.TransactionStatus;\r
-\r
-import eu.etaxonomy.cdm.model.common.DefinedTermBase;\r
-import eu.etaxonomy.cdm.model.common.VocabularyEnum;\r
-\r
-public class TestingTermInitializer extends PersistentTermInitializer {\r
- private static final Logger logger = Logger.getLogger(TestingTermInitializer.class);\r
-\r
- private DataSource dataSource;\r
-\r
- private Resource termsDataSet;\r
-\r
- private Resource termsDtd;\r
-\r
- public void setTermsDataSet(Resource termsDataSet) {\r
- this.termsDataSet = termsDataSet;\r
- }\r
-\r
- public void setTermsDtd(Resource termsDtd) {\r
- this.termsDtd = termsDtd;\r
- }\r
-\r
- @Autowired\r
- public void setDataSource(DataSource dataSource) {\r
- this.dataSource = dataSource;\r
- }\r
-\r
-\r
- @PostConstruct\r
- @Override\r
- public void initialize() {\r
- super.initialize();\r
- }\r
-\r
- @Override\r
- public void doInitialize(){\r
- logger.info("TestingTermInitializer initialize start ...");\r
- if (isOmit()){\r
- logger.info("TestingTermInitializer.omit == true, returning without initializing terms");\r
- return;\r
- } else {\r
- TransactionStatus txStatus = transactionManager.getTransaction(txDefinition);\r
- IDatabaseConnection connection = null;\r
-\r
- try {\r
-\r
- connection = getConnection();\r
-\r
-// MultiSchemaXmlDataSetFactory dataSetFactory = new MultiSchemaXmlDataSetFactory();\r
-// MultiSchemaDataSet multiSchemaDataset = dataSetFactory.createDataSet(termsDataSet.getFile());\r
-//\r
-// if(multiSchemaDataset != null){\r
-// for (String name : multiSchemaDataset.getSchemaNames()) {\r
-// IDataSet clearDataSet = multiSchemaDataset.getDataSetForSchema(name);\r
-// DatabaseOperation.CLEAN_INSERT.execute(connection, clearDataSet);\r
-// }\r
-// }\r
-\r
-// logger.info("loading data base schema from " + termsDtd.getFile().getAbsolutePath());\r
-// logger.info("loading data set from " + termsDataSet.getFile().getAbsolutePath());\r
-\r
-\r
- //old: IDataSet dataSet = new FlatXmlDataSet(new InputStreamReader(termsDataSet.getInputStream()),new InputStreamReader(termsDtd.getInputStream()));\r
-\r
- IDataSet dataSet = new FlatXmlDataSetBuilder()\r
- .setMetaDataSetFromDtd(termsDtd.getInputStream())\r
- .build(termsDataSet.getInputStream());\r
-\r
-\r
- //ITable definedTermBase = dataSet.getTable("DEFINEDTERMBASE");\r
-// for(int rowId = 0; rowId < definedTermBase.getRowCount(); rowId++) {\r
-// System.err.println(rowId + " : " + definedTermBase.getValue(rowId, "CREATEDBY_ID"));\r
-// }\r
- DatabaseOperation.CLEAN_INSERT.execute(connection, dataSet);\r
-\r
- } catch (Exception e) {\r
- logger.error(e);\r
- for(StackTraceElement ste : e.getStackTrace()) {\r
- logger.error(ste);\r
- }\r
- } finally {\r
- try {\r
- if (connection != null){\r
- connection.close();\r
- }\r
- } catch (SQLException sqle) {\r
- logger.error(sqle);\r
- }\r
- }\r
-\r
- transactionManager.commit(txStatus);\r
-\r
- txStatus = transactionManager.getTransaction(txDefinition);\r
-\r
- for(VocabularyEnum vocabularyType : VocabularyEnum.values()) {\r
- initializeAndStore(vocabularyType, new HashMap<UUID,DefinedTermBase>(), null);\r
- }\r
- transactionManager.commit(txStatus);\r
- //txStatus = transactionManager.getTransaction(txDefinition);\r
- }\r
- logger.info("TestingTermInitializer initialize end ...");\r
- }\r
-\r
- protected IDatabaseConnection getConnection() throws SQLException {\r
- IDatabaseConnection connection = null;\r
- try {\r
- connection = new DatabaseConnection(dataSource.getConnection());\r
-\r
- DatabaseConfig config = connection.getConfig();\r
- //FIXME must use unitils.properties: org.unitils.core.dbsupport.DbSupport.implClassName & database.dialect to find configured DataTypeFactory\r
- config.setProperty(DatabaseConfig.PROPERTY_DATATYPE_FACTORY,new H2DataTypeFactory());\r
- } catch (Exception e) {\r
- logger.error(e);\r
- }\r
- return connection;\r
- }\r
-}\r
+/**
+* Copyright (C) 2009 EDIT
+* European Distributed Institute of Taxonomy
+* http://www.e-taxonomy.eu
+*
+* The contents of this file are subject to the Mozilla Public License Version 1.1
+* See LICENSE.TXT at the top of this package for the full license terms.
+*/
+
+package eu.etaxonomy.cdm.database;
+
+import java.util.HashMap;
+import java.util.UUID;
+
+import javax.annotation.PostConstruct;
+import javax.sql.DataSource;
+
+import org.apache.log4j.Logger;
+import org.dbunit.database.DatabaseConfig;
+import org.dbunit.database.DatabaseConnection;
+import org.dbunit.database.IDatabaseConnection;
+import org.dbunit.dataset.IDataSet;
+import org.dbunit.dataset.xml.FlatXmlDataSetBuilder;
+import org.dbunit.ext.h2.H2DataTypeFactory;
+import org.dbunit.operation.DatabaseOperation;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.core.io.Resource;
+import org.springframework.transaction.TransactionStatus;
+
+import eu.etaxonomy.cdm.model.common.DefinedTermBase;
+import eu.etaxonomy.cdm.model.common.VocabularyEnum;
+
+public class TestingTermInitializer extends PersistentTermInitializer {
+ private static final Logger logger = Logger.getLogger(TestingTermInitializer.class);
+
+ private DataSource dataSource;
+
+ private Resource termsDataSet;
+
+ private Resource termsDtd;
+
+ public void setTermsDataSet(Resource termsDataSet) {
+ this.termsDataSet = termsDataSet;
+ }
+
+ public void setTermsDtd(Resource termsDtd) {
+ this.termsDtd = termsDtd;
+ }
+
+ @Autowired
+ public void setDataSource(DataSource dataSource) {
+ this.dataSource = dataSource;
+ }
+
+
+ @PostConstruct
+ @Override
+ public void initialize() {
+ super.initialize();
+ }
+
+ @Override
+ public void doInitialize(){
+ logger.info("TestingTermInitializer initialize start ...");
+ if (isOmit()){
+ logger.info("TestingTermInitializer.omit == true, returning without initializing terms");
+ return;
+ } else {
+ TransactionStatus txStatus = transactionManager.getTransaction(txDefinition);
+ IDatabaseConnection connection = null;
+
+ try {
+
+ connection = getConnection();
+
+// MultiSchemaXmlDataSetFactory dataSetFactory = new MultiSchemaXmlDataSetFactory();
+// MultiSchemaDataSet multiSchemaDataset = dataSetFactory.createDataSet(termsDataSet.getFile());
+//
+// if(multiSchemaDataset != null){
+// for (String name : multiSchemaDataset.getSchemaNames()) {
+// IDataSet clearDataSet = multiSchemaDataset.getDataSetForSchema(name);
+// DatabaseOperation.CLEAN_INSERT.execute(connection, clearDataSet);
+// }
+// }
+
+// logger.info("loading data base schema from " + termsDtd.getFile().getAbsolutePath());
+// logger.info("loading data set from " + termsDataSet.getFile().getAbsolutePath());
+
+
+ //old: IDataSet dataSet = new FlatXmlDataSet(new InputStreamReader(termsDataSet.getInputStream()),new InputStreamReader(termsDtd.getInputStream()));
+
+ IDataSet dataSet = new FlatXmlDataSetBuilder()
+ .setMetaDataSetFromDtd(termsDtd.getInputStream())
+ .build(termsDataSet.getInputStream());
+
+
+ //ITable definedTermBase = dataSet.getTable("DEFINEDTERMBASE");
+// for(int rowId = 0; rowId < definedTermBase.getRowCount(); rowId++) {
+// System.err.println(rowId + " : " + definedTermBase.getValue(rowId, "CREATEDBY_ID"));
+// }
+ DatabaseOperation.CLEAN_INSERT.execute(connection, dataSet);
+
+ } catch (Exception e) {
+ logger.error(e);
+ for(StackTraceElement ste : e.getStackTrace()) {
+ logger.error(ste);
+ }
+ } finally {
+// NOTE(review): with close() commented out below, the IDatabaseConnection obtained
+// from getConnection() is never closed in this method — confirm the underlying JDBC
+// connection is pooled/managed by the DataSource before relying on this.
+// try {
+// this.transactionManager.commit(txStatus);
+// if (connection != null){
+// connection.close();
+// }
+// } catch (SQLException sqle) {
+// logger.error(sqle);
+// }
+ }
+
+ transactionManager.commit(txStatus);
+
+ txStatus = transactionManager.getTransaction(txDefinition);
+
+ for(VocabularyEnum vocabularyType : VocabularyEnum.values()) {
+ initializeAndStore(vocabularyType, new HashMap<UUID,DefinedTermBase>(), null);
+ }
+ transactionManager.commit(txStatus);
+ //txStatus = transactionManager.getTransaction(txDefinition);
+ }
+ logger.info("TestingTermInitializer initialize end ...");
+ }
+
+ protected IDatabaseConnection getConnection() {
+ IDatabaseConnection connection = null;
+ try {
+ connection = new DatabaseConnection(dataSource.getConnection());
+
+ DatabaseConfig config = connection.getConfig();
+ //FIXME must use unitils.properties: org.unitils.core.dbsupport.DbSupport.implClassName & database.dialect to find configured DataTypeFactory
+ config.setProperty(DatabaseConfig.PROPERTY_DATATYPE_FACTORY, new H2DataTypeFactory());
+ } catch (Exception e) {
+ logger.error(e);
+ }
+ return connection;
+ }
+}
import static org.junit.Assert.fail;
import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
assert taxon == null : "taxon must not exist";
setComplete();
endTransaction();
- try {
- printDataSet(new FileOutputStream("test.xml"), TABLE_NAMES);
- } catch (FileNotFoundException e) {
- e.printStackTrace();
- }
+// try {
+// printDataSet(new FileOutputStream("test.xml"), TABLE_NAMES);
+// } catch (FileNotFoundException e) {
+// e.printStackTrace();
+// }
}
@Test
assert taxon == null : "taxon must not exist";
setComplete();
endTransaction();
- try {
- printDataSet(new FileOutputStream("test.xml"), TABLE_NAMES);
- } catch (FileNotFoundException e) {
- e.printStackTrace();
- }
+// try {
+// printDataSet(new FileOutputStream("test.xml"), TABLE_NAMES);
+// } catch (FileNotFoundException e) {
+// e.printStackTrace();
+// }
}
@Test
-// $Id$\r
-/**\r
- * Copyright (C) 2015 EDIT\r
- * European Distributed Institute of Taxonomy\r
- * http://www.e-taxonomy.eu\r
- *\r
- * The contents of this file are subject to the Mozilla Public License Version 1.1\r
- * See LICENSE.TXT at the top of this package for the full license terms.\r
- */\r
-package eu.etaxonomy.cdm.persistence.dao.jdbc.validation;\r
-\r
-import static org.junit.Assert.assertEquals;\r
-import static org.junit.Assert.assertNotNull;\r
-import static org.junit.Assert.assertNull;\r
-import static org.junit.Assert.assertTrue;\r
-\r
-import java.io.FileNotFoundException;\r
-import java.util.ArrayList;\r
-import java.util.Collections;\r
-import java.util.Comparator;\r
-import java.util.HashSet;\r
-import java.util.List;\r
-import java.util.Set;\r
-import java.util.UUID;\r
-\r
-import javax.validation.ConstraintViolation;\r
-import javax.validation.Validation;\r
-import javax.validation.ValidatorFactory;\r
-\r
-import org.hibernate.validator.HibernateValidator;\r
-import org.hibernate.validator.HibernateValidatorConfiguration;\r
-import org.joda.time.DateTime;\r
-import org.junit.Before;\r
-import org.junit.Test;\r
-import org.unitils.dbunit.annotation.DataSet;\r
-import org.unitils.dbunit.annotation.ExpectedDataSet;\r
-\r
-import eu.etaxonomy.cdm.model.validation.CRUDEventType;\r
-import eu.etaxonomy.cdm.model.validation.EntityConstraintViolation;\r
-import eu.etaxonomy.cdm.model.validation.EntityValidation;\r
-import eu.etaxonomy.cdm.model.validation.Severity;\r
-import eu.etaxonomy.cdm.persistence.validation.Company;\r
-import eu.etaxonomy.cdm.persistence.validation.Employee;\r
-import eu.etaxonomy.cdm.test.integration.CdmIntegrationTest;\r
-import eu.etaxonomy.cdm.validation.Level2;\r
-\r
-/**\r
- * @author ayco_holleman\r
- * @date 20 jan. 2015\r
- *\r
- */\r
-public class EntityValidationCrudJdbcImplTest extends CdmIntegrationTest {\r
-\r
- private static final String MEDIA = "eu.etaxonomy.cdm.model.media.Media";\r
- private static final String SYNONYM_RELATIONSHIP = "eu.etaxonomy.cdm.model.taxon.SynonymRelationship";\r
- private static final String GATHERING_EVENT = "eu.etaxonomy.cdm.model.occurrence.GatheringEvent";\r
-\r
- /**\r
- * @throws java.lang.Exception\r
- */\r
- @Before\r
- public void setUp() throws Exception {\r
- }\r
-\r
- /**\r
- * Test method for\r
- * {@link eu.etaxonomy.cdm.persistence.dao.jdbc.validation.EntityValidationCrudJdbcImpl#EntityValidationCrudJdbcImpl()}\r
- * .\r
- */\r
- @SuppressWarnings("unused")\r
- @Test\r
- public void testEntityValidationCrudJdbcImpl() {\r
- new EntityValidationCrudJdbcImpl();\r
- }\r
-\r
- /**\r
- * Test method for\r
- * {@link eu.etaxonomy.cdm.persistence.dao.jdbc.validation.EntityValidationCrudJdbcImpl#EntityValidationCrudJdbcImpl (eu.etaxonomy.cdm.database.ICdmDataSource)}\r
- * .\r
- */\r
- @SuppressWarnings("unused")\r
- @Test\r
- public void test_EntityValidationCrudJdbcImplI_CdmDataSource() {\r
- new EntityValidationCrudJdbcImpl(dataSource);\r
- }\r
-\r
- /**\r
- * Test method for\r
- * {@link eu.etaxonomy.cdm.persistence.dao.jdbc.validation.EntityValidationCrudJdbcImpl#saveEntityValidation (eu.etaxonomy.cdm.model.common.CdmBase, java.util.Set, eu.etaxonomy.cdm.model.validation.CRUDEventType, Class)}\r
- * .\r
- */\r
- @Test\r
- @DataSet\r
- public void test_SaveValidationResult_Set_T_CRUDEventType() {\r
- HibernateValidatorConfiguration config = Validation.byProvider(HibernateValidator.class).configure();\r
- ValidatorFactory factory = config.buildValidatorFactory();\r
-\r
- // This is the bean that is going to be tested\r
- Employee emp = new Employee();\r
- emp.setId(1);\r
- UUID uuid = emp.getUuid();\r
- // ERROR 1 (should be JOHN)\r
- emp.setFirstName("john");\r
- // This is an error (should be SMITH), but it is a Level-3\r
- // validation error, so the error should be ignored\r
- emp.setLastName("smith");\r
-\r
- // This is an @Valid bean on the Employee class, so Level-2\r
- // validation errors on the Company object should also be\r
- // listed.\r
- Company comp = new Company();\r
- // ERROR 2 (should be GOOGLE)\r
- comp.setName("Google");\r
- emp.setCompany(comp);\r
-\r
- Set<ConstraintViolation<Employee>> errors = factory.getValidator().validate(emp, Level2.class);\r
- EntityValidationCrudJdbcImpl dao = new EntityValidationCrudJdbcImpl(dataSource);\r
- dao.saveEntityValidation(emp, errors, CRUDEventType.NONE, null);\r
-\r
- EntityValidation result = dao.getEntityValidation(emp.getClass().getName(), emp.getId());\r
- assertNotNull(result);\r
- assertEquals("Unexpected UUID", result.getValidatedEntityUuid(), uuid);\r
- assertEquals("Unexpected number of constraint violations", 2, result.getEntityConstraintViolations().size());\r
- Set<EntityConstraintViolation> violations = result.getEntityConstraintViolations();\r
- List<EntityConstraintViolation> list = new ArrayList<EntityConstraintViolation>(violations);\r
- Collections.sort(list, new Comparator<EntityConstraintViolation>() {\r
- @Override\r
- public int compare(EntityConstraintViolation o1, EntityConstraintViolation o2) {\r
- return o1.getPropertyPath().toString().compareTo(o2.getPropertyPath().toString());\r
- }\r
- });\r
- assertEquals("Unexpected propertypath", list.get(0).getPropertyPath().toString(), "company.name");\r
- assertEquals("Unexpected propertypath", list.get(1).getPropertyPath().toString(), "firstName");\r
-\r
- }\r
-\r
- @Test\r
- @DataSet("EntityValidationCrudJdbcImplTest.testSave.xml")\r
- @ExpectedDataSet("EntityValidationCrudJdbcImplTest.testSaveAlreadyExistingError-result.xml")\r
- // Test proving that if an exactly identical\r
- // EntityConstraintViolation (as per equals() method)\r
- // is already in database, the only thing that happens\r
- // is an increment of the validation counter.\r
- public void testSaveAlreadyExistingError() {\r
-\r
- // All same as in @DataSet:\r
-\r
- DateTime created = new DateTime(2014, 1, 1, 0, 0);\r
-\r
- Employee emp = new Employee();\r
- emp.setId(100);\r
- emp.setUuid(UUID.fromString("f8de74c6-aa56-4de3-931e-87b61da0218c"));\r
- // Other properties not relevant for this test\r
-\r
- EntityValidation entityValidation = EntityValidation.newInstance();\r
- entityValidation.setValidatedEntity(emp);\r
- entityValidation.setId(1);\r
- entityValidation.setUuid(UUID.fromString("dae5b090-30e8-45bc-9460-2eb2028d3c18"));\r
- entityValidation.setCreated(created);\r
- entityValidation.setCrudEventType(CRUDEventType.INSERT);\r
- entityValidation.setValidationCount(5);\r
-\r
- EntityConstraintViolation error = EntityConstraintViolation.newInstance();\r
-\r
- // Actually not same as in @DataSet to force\r
- // EntityConstraintViolation.equals() method to take\r
- // other properties into account (e.g. propertyPath,\r
- // invalidValue, etc.)\r
- error.setId(Integer.MIN_VALUE);\r
-\r
- error.setCreated(created);\r
- error.setUuid(UUID.fromString("358da71f-b646-4b79-b00e-dcb68b6425ba"));\r
- error.setSeverity(Severity.ERROR);\r
- error.setPropertyPath("firstName");\r
- error.setInvalidValue("Foo");\r
- error.setMessage("Garbage In Garbage Out");\r
- error.setValidationGroup("eu.etaxonomy.cdm.validation.Level2");\r
- error.setValidator("eu.etaxonomy.cdm.persistence.validation.GarbageValidator");\r
- Set<EntityConstraintViolation> errors = new HashSet<EntityConstraintViolation>(1);\r
- errors.add(error);\r
-\r
- entityValidation.addEntityConstraintViolation(error);\r
-\r
- EntityValidationCrudJdbcImpl dao = new EntityValidationCrudJdbcImpl(dataSource);\r
- dao.saveEntityValidation(entityValidation, new Class[] { Level2.class });\r
- }\r
-\r
- @Test\r
- @DataSet("EntityValidationCrudJdbcImplTest.testSave.xml")\r
- @ExpectedDataSet("EntityValidationCrudJdbcImplTest.testReplaceError-result.xml")\r
- // Test proving that if an entity has been validated,\r
- // yielding 1 error (as in @DataSet), and a subsequent\r
- // validation also yields 1 error, but a different one,\r
- // then validation count is increased, the old error is\r
- // removed and the new error is inserted.\r
- public void testReplaceError() {\r
-\r
- // All identical to @DataSet:\r
-\r
- DateTime created = new DateTime(2014, 1, 1, 0, 0);\r
-\r
- Employee emp = new Employee();\r
- emp.setId(100);\r
- emp.setUuid(UUID.fromString("f8de74c6-aa56-4de3-931e-87b61da0218c"));\r
-\r
- EntityValidation entityValidation = EntityValidation.newInstance();\r
- entityValidation.setValidatedEntity(emp);\r
- entityValidation.setId(1);\r
- entityValidation.setUuid(UUID.fromString("dae5b090-30e8-45bc-9460-2eb2028d3c18"));\r
- entityValidation.setCreated(created);\r
- entityValidation.setCrudEventType(CRUDEventType.INSERT);\r
- entityValidation.setValidationCount(5);\r
-\r
- EntityConstraintViolation error = EntityConstraintViolation.newInstance();\r
- error.setId(38);\r
- error.setCreated(created);\r
- error.setUuid(UUID.fromString("358da71f-b646-4b79-b00e-dcb68b6425ba"));\r
- error.setSeverity(Severity.ERROR);\r
- error.setPropertyPath("firstName");\r
-\r
- // Except for:\r
- error.setInvalidValue("Bar");\r
-\r
- error.setMessage("Garbage In Garbage Out");\r
- error.setValidationGroup("eu.etaxonomy.cdm.validation.Level2");\r
- error.setValidator("eu.etaxonomy.cdm.persistence.validation.GarbageValidator");\r
- Set<EntityConstraintViolation> errors = new HashSet<EntityConstraintViolation>(1);\r
- errors.add(error);\r
-\r
- entityValidation.addEntityConstraintViolation(error);\r
-\r
- EntityValidationCrudJdbcImpl dao = new EntityValidationCrudJdbcImpl(dataSource);\r
- dao.saveEntityValidation(entityValidation, new Class[] { Level2.class });\r
- }\r
-\r
- @Test\r
- @DataSet("EntityValidationCrudJdbcImplTest.testSave.xml")\r
- @ExpectedDataSet("EntityValidationCrudJdbcImplTest.testSameErrorOtherEntity-result.xml")\r
- // Test proving that if an entity has been validated,\r
- // yielding 1 error (as in @DataSet), and _another_\r
- // entity is now validated yielding an equals() error,\r
- // things behave as expected (2 entityvalidations, each\r
- // having 1 entityconstraintviolation)\r
- public void testSameErrorOtherEntity() {\r
-\r
- DateTime created = new DateTime(2014, 1, 1, 0, 0);\r
-\r
- // Not in @DataSet\r
- Employee emp = new Employee();\r
- emp.setId(200);\r
- emp.setUuid(UUID.fromString("f8de74c6-aa56-4de3-931e-87b61da0218d"));\r
-\r
- EntityValidation entityValidation = EntityValidation.newInstance();\r
- entityValidation.setValidatedEntity(emp);\r
- entityValidation.setId(2);\r
- entityValidation.setUuid(UUID.fromString("dae5b090-30e8-45bc-9460-2eb2028d3c19"));\r
- entityValidation.setCreated(created);\r
- entityValidation.setCrudEventType(CRUDEventType.INSERT);\r
- entityValidation.setValidationCount(1);\r
-\r
- // equals() error in @DataSet\r
- EntityConstraintViolation error = EntityConstraintViolation.newInstance();\r
- error.setId(2);\r
- error.setCreated(created);\r
- error.setUuid(UUID.fromString("358da71f-b646-4b79-b00e-dcb68b6425bb"));\r
- error.setSeverity(Severity.ERROR);\r
- error.setPropertyPath("firstName");\r
- error.setInvalidValue("Foo");\r
-\r
- error.setMessage("Garbage In Garbage Out");\r
- error.setValidationGroup("eu.etaxonomy.cdm.validation.Level2");\r
- error.setValidator("eu.etaxonomy.cdm.persistence.validation.GarbageValidator");\r
- Set<EntityConstraintViolation> errors = new HashSet<EntityConstraintViolation>(1);\r
- errors.add(error);\r
-\r
- entityValidation.addEntityConstraintViolation(error);\r
-\r
- EntityValidationCrudJdbcImpl dao = new EntityValidationCrudJdbcImpl(dataSource);\r
- dao.saveEntityValidation(entityValidation, new Class[] { Level2.class });\r
- }\r
- @Test\r
- @DataSet("EntityValidationCrudJdbcImplTest.testSave.xml")\r
- @ExpectedDataSet("EntityValidationCrudJdbcImplTest.testOneOldOneNewError-result.xml")\r
- // Test proving that if an entity has been validated,\r
- // yielding 1 error (as in @DataSet), and _another_\r
- // entity is now validated yielding an equals() error,\r
- // things behave as expected (2 entityvalidations, each\r
- // having 1 entityconstraintviolation)\r
- public void testOneOldOneNewError() {\r
-\r
- DateTime created = new DateTime(2014, 1, 1, 0, 0);\r
-\r
- // Same entity as in @DataSet\r
- Employee emp = new Employee();\r
- emp.setId(100);\r
- emp.setUuid(UUID.fromString("f8de74c6-aa56-4de3-931e-87b61da0218c"));\r
- // Other properties not relevant for this test\r
-\r
- EntityValidation entityValidation = EntityValidation.newInstance();\r
- entityValidation.setValidatedEntity(emp);\r
- entityValidation.setId(1);\r
- entityValidation.setUuid(UUID.fromString("dae5b090-30e8-45bc-9460-2eb2028d3c18"));\r
- entityValidation.setCreated(created);\r
- entityValidation.setCrudEventType(CRUDEventType.INSERT);\r
-\r
-\r
- // Old error (in @DataSet)\r
- EntityConstraintViolation error = EntityConstraintViolation.newInstance();\r
- error.setId(Integer.MIN_VALUE);\r
- error.setCreated(created);\r
- error.setUuid(UUID.fromString("358da71f-b646-4b79-b00e-dcb68b6425ba"));\r
- error.setSeverity(Severity.ERROR);\r
- error.setPropertyPath("firstName");\r
- error.setInvalidValue("Foo");\r
- error.setMessage("Garbage In Garbage Out");\r
- error.setValidationGroup("eu.etaxonomy.cdm.validation.Level2");\r
- error.setValidator("eu.etaxonomy.cdm.persistence.validation.GarbageValidator");\r
- entityValidation.addEntityConstraintViolation(error);\r
-\r
- // New error (not in @DataSet)\r
- error = EntityConstraintViolation.newInstance();\r
- // Don't leave ID generation to chance; we want it to be same as in\r
- // @ExpectedDataSet\r
- error.setId(2);\r
- error.setCreated(created);\r
- error.setUuid(UUID.fromString("358da71f-b646-4b79-b00e-dcb68b6425bb"));\r
- error.setSeverity(Severity.ERROR);\r
- error.setPropertyPath("lastName");\r
- error.setInvalidValue("Bar");\r
- error.setMessage("Garbage In Garbage Out");\r
- error.setValidationGroup("eu.etaxonomy.cdm.validation.Level2");\r
- error.setValidator("eu.etaxonomy.cdm.persistence.validation.LastNameValidator");\r
- entityValidation.addEntityConstraintViolation(error);\r
-\r
- EntityValidationCrudJdbcImpl dao = new EntityValidationCrudJdbcImpl(dataSource);\r
- dao.saveEntityValidation(entityValidation, new Class[] { Level2.class });\r
- }\r
-\r
-\r
-\r
- @Test\r
- @DataSet("EntityValidationCrudJdbcImplTest.testSave.xml")\r
- @ExpectedDataSet("EntityValidationCrudJdbcImplTest.testAllErrorsSolved-result.xml")\r
- // Test proving that if an entity has been validated,\r
- // yielding 1 error (as in @DataSet), and a subsequent\r
- // validation yields 0 errors, all that remains is an\r
- // EntityValidation record with its validation counter\r
- // increased.\r
- public void testAllErrorsSolved() {\r
-\r
- DateTime created = new DateTime(2014, 1, 1, 0, 0);\r
-\r
- Employee emp = new Employee();\r
- emp.setId(100);\r
- emp.setUuid(UUID.fromString("f8de74c6-aa56-4de3-931e-87b61da0218c"));\r
-\r
- EntityValidation entityValidation = EntityValidation.newInstance();\r
- entityValidation.setValidatedEntity(emp);\r
- entityValidation.setId(1);\r
- entityValidation.setUuid(UUID.fromString("dae5b090-30e8-45bc-9460-2eb2028d3c18"));\r
- entityValidation.setCreated(created);\r
- entityValidation.setCrudEventType(CRUDEventType.INSERT);\r
- entityValidation.setValidationCount(5);\r
-\r
- EntityValidationCrudJdbcImpl dao = new EntityValidationCrudJdbcImpl(dataSource);\r
- dao.saveEntityValidation(entityValidation, new Class[] { Level2.class });\r
- }\r
-\r
- /**\r
- * Test method for\r
- * {@link eu.etaxonomy.cdm.persistence.dao.jdbc.validation.EntityValidationCrudJdbcImpl#deleteEntityValidation (java.lang.String, int)}\r
- * .\r
- */\r
- @Test\r
- @DataSet\r
- @ExpectedDataSet\r
- public void test_DeleteValidationResult() {\r
- EntityValidationCrudJdbcImpl dao = new EntityValidationCrudJdbcImpl(dataSource);\r
- dao.deleteEntityValidation(SYNONYM_RELATIONSHIP, 200);\r
- EntityValidation result = dao.getEntityValidation(SYNONYM_RELATIONSHIP, 200);\r
- assertTrue(result == null);\r
- }\r
-\r
- @Test\r
- @DataSet\r
- public void testGetEntityValidation() {\r
- EntityValidationCrudJdbcImpl dao = new EntityValidationCrudJdbcImpl(dataSource);\r
- EntityValidation result;\r
-\r
- result = dao.getEntityValidation(MEDIA, 100);\r
- assertNotNull(result);\r
- assertEquals("Unexpected entity id", 1, result.getId());\r
- assertEquals("Unexpected number of constraint violations", 1, result.getEntityConstraintViolations().size());\r
-\r
- result = dao.getEntityValidation(SYNONYM_RELATIONSHIP, 200);\r
- assertNotNull(result);\r
- assertEquals("Unexpected entity id", 2, result.getId());\r
- assertEquals("Unexpected number of constraint violations", 2, result.getEntityConstraintViolations().size());\r
-\r
- result = dao.getEntityValidation(GATHERING_EVENT, 300);\r
- assertNotNull(result);\r
- assertEquals("Unexpected entity id", 3, result.getId());\r
- assertEquals("Unexpected number of constraint violations", 3, result.getEntityConstraintViolations().size());\r
-\r
- result = dao.getEntityValidation(GATHERING_EVENT, 301);\r
- assertNotNull(result);\r
- assertEquals("Unexpected entity id", 4, result.getId());\r
- assertEquals("Unexpected number of constraint violations", 1, result.getEntityConstraintViolations().size());\r
-\r
- // Test we get a null back\r
- result = dao.getEntityValidation("Foo Bar", 100);\r
- assertNull(result);\r
- }\r
-\r
- /**\r
- * Test method for\r
- * {@link eu.etaxonomy.cdm.persistence.dao.jdbc.validation.EntityValidationCrudJdbcImpl#setDatasource (eu.etaxonomy.cdm.database.ICdmDataSource)}\r
- * .\r
- */\r
- @Test\r
- public void testSetDatasource() {\r
- EntityValidationCrudJdbcImpl dao = new EntityValidationCrudJdbcImpl();\r
- dao.setDatasource(dataSource);\r
- }\r
-\r
- @Override\r
- public void createTestDataSet() throws FileNotFoundException {\r
- }\r
-\r
-}\r
+// $Id$
+/**
+ * Copyright (C) 2015 EDIT
+ * European Distributed Institute of Taxonomy
+ * http://www.e-taxonomy.eu
+ *
+ * The contents of this file are subject to the Mozilla Public License Version 1.1
+ * See LICENSE.TXT at the top of this package for the full license terms.
+ */
+package eu.etaxonomy.cdm.persistence.dao.jdbc.validation;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.FileNotFoundException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.UUID;
+
+import javax.validation.ConstraintViolation;
+import javax.validation.Validation;
+import javax.validation.ValidatorFactory;
+
+import org.hibernate.validator.HibernateValidator;
+import org.hibernate.validator.HibernateValidatorConfiguration;
+import org.joda.time.DateTime;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.unitils.dbunit.annotation.DataSet;
+import org.unitils.dbunit.annotation.ExpectedDataSet;
+import org.unitils.spring.annotation.SpringBeanByType;
+
+import eu.etaxonomy.cdm.model.validation.CRUDEventType;
+import eu.etaxonomy.cdm.model.validation.EntityConstraintViolation;
+import eu.etaxonomy.cdm.model.validation.EntityValidation;
+import eu.etaxonomy.cdm.model.validation.Severity;
+import eu.etaxonomy.cdm.persistence.validation.Company;
+import eu.etaxonomy.cdm.persistence.validation.Employee;
+import eu.etaxonomy.cdm.test.integration.CdmIntegrationTest;
+import eu.etaxonomy.cdm.validation.Level2;
+
+/**
+ * @author ayco_holleman
+ * @date 20 jan. 2015
+ *
+ */
+@Ignore
+public class EntityValidationCrudJdbcImplTest extends CdmIntegrationTest {
+
+ private static final String MEDIA = "eu.etaxonomy.cdm.model.media.Media";
+ private static final String SYNONYM_RELATIONSHIP = "eu.etaxonomy.cdm.model.taxon.SynonymRelationship";
+ private static final String GATHERING_EVENT = "eu.etaxonomy.cdm.model.occurrence.GatheringEvent";
+
+ @SpringBeanByType
+ private EntityValidationCrudJdbcImpl validationCrudJdbcDao;
+
+ /**
+ * @throws java.lang.Exception
+ */
+ @Before
+ public void setUp() throws Exception {
+ }
+
+ /**
+ * Test method for
+ * {@link eu.etaxonomy.cdm.persistence.dao.jdbc.validation.EntityValidationCrudJdbcImpl#EntityValidationCrudJdbcImpl()}
+ * .
+ */
+ @SuppressWarnings("unused")
+ @Test
+ public void testEntityValidationCrudJdbcImpl() {
+ new EntityValidationCrudJdbcImpl();
+ }
+
+ /**
+ * Test method for
+ * {@link eu.etaxonomy.cdm.persistence.dao.jdbc.validation.EntityValidationCrudJdbcImpl#EntityValidationCrudJdbcImpl (eu.etaxonomy.cdm.database.ICdmDataSource)}
+ * .
+ */
+ @SuppressWarnings("unused")
+ @Test
+ public void test_EntityValidationCrudJdbcImplI_CdmDataSource() {
+ new EntityValidationCrudJdbcImpl(dataSource);
+ }
+
+ /**
+ * Test method for
+     * {@link eu.etaxonomy.cdm.persistence.dao.jdbc.validation.EntityValidationCrudJdbcImpl#saveEntityValidation(eu.etaxonomy.cdm.model.common.CdmBase, java.util.Set, eu.etaxonomy.cdm.model.validation.CRUDEventType, Class)}
+ * .
+ */
+ @Test
+ @DataSet
+ public void test_SaveValidationResult_Set_T_CRUDEventType() {
+ HibernateValidatorConfiguration config = Validation.byProvider(HibernateValidator.class).configure();
+ ValidatorFactory factory = config.buildValidatorFactory();
+
+ // This is the bean that is going to be tested
+ Employee emp = new Employee();
+ emp.setId(1);
+ UUID uuid = emp.getUuid();
+ // ERROR 1 (should be JOHN)
+ emp.setFirstName("john");
+ // This is an error (should be SMITH), but it is a Level-3
+ // validation error, so the error should be ignored
+ emp.setLastName("smith");
+
+ // This is an @Valid bean on the Employee class, so Level-2
+ // validation errors on the Company object should also be
+ // listed.
+ Company comp = new Company();
+ // ERROR 2 (should be GOOGLE)
+ comp.setName("Google");
+ emp.setCompany(comp);
+
+ Set<ConstraintViolation<Employee>> errors = factory.getValidator().validate(emp, Level2.class);
+ EntityValidationCrudJdbcImpl dao = new EntityValidationCrudJdbcImpl(dataSource);
+ dao.saveEntityValidation(emp, errors, CRUDEventType.NONE, null);
+
+ EntityValidation result = dao.getEntityValidation(emp.getClass().getName(), emp.getId());
+ assertNotNull(result);
+ assertEquals("Unexpected UUID", result.getValidatedEntityUuid(), uuid);
+ assertEquals("Unexpected number of constraint violations", 2, result.getEntityConstraintViolations().size());
+ Set<EntityConstraintViolation> violations = result.getEntityConstraintViolations();
+ List<EntityConstraintViolation> list = new ArrayList<EntityConstraintViolation>(violations);
+ Collections.sort(list, new Comparator<EntityConstraintViolation>() {
+ @Override
+ public int compare(EntityConstraintViolation o1, EntityConstraintViolation o2) {
+ return o1.getPropertyPath().toString().compareTo(o2.getPropertyPath().toString());
+ }
+ });
+ assertEquals("Unexpected propertypath", list.get(0).getPropertyPath().toString(), "company.name");
+ assertEquals("Unexpected propertypath", list.get(1).getPropertyPath().toString(), "firstName");
+
+ }
+
+ @Test
+ @DataSet("EntityValidationCrudJdbcImplTest.testSave.xml")
+ @ExpectedDataSet("EntityValidationCrudJdbcImplTest.testSaveAlreadyExistingError-result.xml")
+ // Test proving that if an exactly identical
+ // EntityConstraintViolation (as per equals() method)
+ // is already in database, the only thing that happens
+ // is an increment of the validation counter.
+ public void testSaveAlreadyExistingError() {
+
+ // All same as in @DataSet:
+
+ DateTime created = new DateTime(2014, 1, 1, 0, 0);
+
+ Employee emp = new Employee();
+ emp.setId(100);
+ emp.setUuid(UUID.fromString("f8de74c6-aa56-4de3-931e-87b61da0218c"));
+ // Other properties not relevant for this test
+
+ EntityValidation entityValidation = EntityValidation.newInstance();
+ entityValidation.setValidatedEntity(emp);
+ entityValidation.setId(1);
+ entityValidation.setUuid(UUID.fromString("dae5b090-30e8-45bc-9460-2eb2028d3c18"));
+ entityValidation.setCreated(created);
+ entityValidation.setCrudEventType(CRUDEventType.INSERT);
+ entityValidation.setValidationCount(5);
+
+ EntityConstraintViolation error = EntityConstraintViolation.newInstance();
+
+ // Actually not same as in @DataSet to force
+ // EntityConstraintViolation.equals() method to take
+ // other properties into account (e.g. propertyPath,
+ // invalidValue, etc.)
+ error.setId(Integer.MIN_VALUE);
+
+ error.setCreated(created);
+ error.setUuid(UUID.fromString("358da71f-b646-4b79-b00e-dcb68b6425ba"));
+ error.setSeverity(Severity.ERROR);
+ error.setPropertyPath("firstName");
+ error.setInvalidValue("Foo");
+ error.setMessage("Garbage In Garbage Out");
+ error.setValidationGroup("eu.etaxonomy.cdm.validation.Level2");
+ error.setValidator("eu.etaxonomy.cdm.persistence.validation.GarbageValidator");
+ Set<EntityConstraintViolation> errors = new HashSet<EntityConstraintViolation>(1);
+ errors.add(error);
+
+ entityValidation.addEntityConstraintViolation(error);
+
+ EntityValidationCrudJdbcImpl dao = new EntityValidationCrudJdbcImpl(dataSource);
+ dao.saveEntityValidation(entityValidation, new Class[] { Level2.class });
+ }
+
+ @Test
+ @DataSet("EntityValidationCrudJdbcImplTest.testSave.xml")
+ @ExpectedDataSet("EntityValidationCrudJdbcImplTest.testReplaceError-result.xml")
+ // Test proving that if an entity has been validated,
+ // yielding 1 error (as in @DataSet), and a subsequent
+ // validation also yields 1 error, but a different one,
+ // then validation count is increased, the old error is
+ // removed and the new error is inserted.
+ public void testReplaceError() {
+
+ // All identical to @DataSet:
+
+ DateTime created = new DateTime(2014, 1, 1, 0, 0);
+
+ Employee emp = new Employee();
+ emp.setId(100);
+ emp.setUuid(UUID.fromString("f8de74c6-aa56-4de3-931e-87b61da0218c"));
+
+ EntityValidation entityValidation = EntityValidation.newInstance();
+ entityValidation.setValidatedEntity(emp);
+ entityValidation.setId(1);
+ entityValidation.setUuid(UUID.fromString("dae5b090-30e8-45bc-9460-2eb2028d3c18"));
+ entityValidation.setCreated(created);
+ entityValidation.setCrudEventType(CRUDEventType.INSERT);
+ entityValidation.setValidationCount(5);
+
+ EntityConstraintViolation error = EntityConstraintViolation.newInstance();
+ error.setId(38);
+ error.setCreated(created);
+ error.setUuid(UUID.fromString("358da71f-b646-4b79-b00e-dcb68b6425ba"));
+ error.setSeverity(Severity.ERROR);
+ error.setPropertyPath("firstName");
+
+ // Except for:
+ error.setInvalidValue("Bar");
+
+ error.setMessage("Garbage In Garbage Out");
+ error.setValidationGroup("eu.etaxonomy.cdm.validation.Level2");
+ error.setValidator("eu.etaxonomy.cdm.persistence.validation.GarbageValidator");
+ Set<EntityConstraintViolation> errors = new HashSet<EntityConstraintViolation>(1);
+ errors.add(error);
+
+ entityValidation.addEntityConstraintViolation(error);
+
+ EntityValidationCrudJdbcImpl dao = new EntityValidationCrudJdbcImpl(dataSource);
+ dao.saveEntityValidation(entityValidation, new Class[] { Level2.class });
+ }
+
+ @Test
+ @DataSet("EntityValidationCrudJdbcImplTest.testSave.xml")
+ @ExpectedDataSet("EntityValidationCrudJdbcImplTest.testSameErrorOtherEntity-result.xml")
+ // Test proving that if an entity has been validated,
+ // yielding 1 error (as in @DataSet), and _another_
+ // entity is now validated yielding an equals() error,
+ // things behave as expected (2 entityvalidations, each
+ // having 1 entityconstraintviolation)
+ public void testSameErrorOtherEntity() {
+
+ DateTime created = new DateTime(2014, 1, 1, 0, 0);
+
+ // Not in @DataSet
+ Employee emp = new Employee();
+ emp.setId(200);
+ emp.setUuid(UUID.fromString("f8de74c6-aa56-4de3-931e-87b61da0218d"));
+
+ EntityValidation entityValidation = EntityValidation.newInstance();
+ entityValidation.setValidatedEntity(emp);
+ entityValidation.setId(2);
+ entityValidation.setUuid(UUID.fromString("dae5b090-30e8-45bc-9460-2eb2028d3c19"));
+ entityValidation.setCreated(created);
+ entityValidation.setCrudEventType(CRUDEventType.INSERT);
+ entityValidation.setValidationCount(1);
+
+ // equals() error in @DataSet
+ EntityConstraintViolation error = EntityConstraintViolation.newInstance();
+ error.setId(2);
+ error.setCreated(created);
+ error.setUuid(UUID.fromString("358da71f-b646-4b79-b00e-dcb68b6425bb"));
+ error.setSeverity(Severity.ERROR);
+ error.setPropertyPath("firstName");
+ error.setInvalidValue("Foo");
+
+ error.setMessage("Garbage In Garbage Out");
+ error.setValidationGroup("eu.etaxonomy.cdm.validation.Level2");
+ error.setValidator("eu.etaxonomy.cdm.persistence.validation.GarbageValidator");
+ Set<EntityConstraintViolation> errors = new HashSet<EntityConstraintViolation>(1);
+ errors.add(error);
+
+ entityValidation.addEntityConstraintViolation(error);
+
+ EntityValidationCrudJdbcImpl dao = new EntityValidationCrudJdbcImpl(dataSource);
+ dao.saveEntityValidation(entityValidation, new Class[] { Level2.class });
+ }
+ @Test
+ @DataSet("EntityValidationCrudJdbcImplTest.testSave.xml")
+ @ExpectedDataSet("EntityValidationCrudJdbcImplTest.testOneOldOneNewError-result.xml")
+    // Test proving that if an entity has been validated,
+    // yielding 1 error (as in @DataSet), and a subsequent
+    // validation of the SAME entity yields that old error
+    // again plus one new error, the old error is kept and
+    // the new error is inserted alongside it.
+ public void testOneOldOneNewError() {
+
+ DateTime created = new DateTime(2014, 1, 1, 0, 0);
+
+ // Same entity as in @DataSet
+ Employee emp = new Employee();
+ emp.setId(100);
+ emp.setUuid(UUID.fromString("f8de74c6-aa56-4de3-931e-87b61da0218c"));
+ // Other properties not relevant for this test
+
+ EntityValidation entityValidation = EntityValidation.newInstance();
+ entityValidation.setValidatedEntity(emp);
+ entityValidation.setId(1);
+ entityValidation.setUuid(UUID.fromString("dae5b090-30e8-45bc-9460-2eb2028d3c18"));
+ entityValidation.setCreated(created);
+ entityValidation.setCrudEventType(CRUDEventType.INSERT);
+
+
+ // Old error (in @DataSet)
+ EntityConstraintViolation error = EntityConstraintViolation.newInstance();
+ error.setId(Integer.MIN_VALUE);
+ error.setCreated(created);
+ error.setUuid(UUID.fromString("358da71f-b646-4b79-b00e-dcb68b6425ba"));
+ error.setSeverity(Severity.ERROR);
+ error.setPropertyPath("firstName");
+ error.setInvalidValue("Foo");
+ error.setMessage("Garbage In Garbage Out");
+ error.setValidationGroup("eu.etaxonomy.cdm.validation.Level2");
+ error.setValidator("eu.etaxonomy.cdm.persistence.validation.GarbageValidator");
+ entityValidation.addEntityConstraintViolation(error);
+
+ // New error (not in @DataSet)
+ error = EntityConstraintViolation.newInstance();
+ // Don't leave ID generation to chance; we want it to be same as in
+ // @ExpectedDataSet
+ error.setId(2);
+ error.setCreated(created);
+ error.setUuid(UUID.fromString("358da71f-b646-4b79-b00e-dcb68b6425bb"));
+ error.setSeverity(Severity.ERROR);
+ error.setPropertyPath("lastName");
+ error.setInvalidValue("Bar");
+ error.setMessage("Garbage In Garbage Out");
+ error.setValidationGroup("eu.etaxonomy.cdm.validation.Level2");
+ error.setValidator("eu.etaxonomy.cdm.persistence.validation.LastNameValidator");
+ entityValidation.addEntityConstraintViolation(error);
+
+// EntityValidationCrudJdbcImpl dao = new EntityValidationCrudJdbcImpl(dataSource);
+ validationCrudJdbcDao.saveEntityValidation(entityValidation, new Class[] { Level2.class });
+ }
+
+
+
+ @Test
+ @DataSet("EntityValidationCrudJdbcImplTest.testSave.xml")
+ @ExpectedDataSet("EntityValidationCrudJdbcImplTest.testAllErrorsSolved-result.xml")
+ // Test proving that if an entity has been validated,
+ // yielding 1 error (as in @DataSet), and a subsequent
+ // validation yields 0 errors, all that remains is an
+ // EntityValidation record with its validation counter
+ // increased.
+ public void testAllErrorsSolved() {
+
+ DateTime created = new DateTime(2014, 1, 1, 0, 0);
+
+ Employee emp = new Employee();
+ emp.setId(100);
+ emp.setUuid(UUID.fromString("f8de74c6-aa56-4de3-931e-87b61da0218c"));
+
+ EntityValidation entityValidation = EntityValidation.newInstance();
+ entityValidation.setValidatedEntity(emp);
+ entityValidation.setId(1);
+ entityValidation.setUuid(UUID.fromString("dae5b090-30e8-45bc-9460-2eb2028d3c18"));
+ entityValidation.setCreated(created);
+ entityValidation.setCrudEventType(CRUDEventType.INSERT);
+ entityValidation.setValidationCount(5);
+
+ EntityValidationCrudJdbcImpl dao = new EntityValidationCrudJdbcImpl(dataSource);
+ dao.saveEntityValidation(entityValidation, new Class[] { Level2.class });
+ }
+
+ /**
+ * Test method for
+     * {@link eu.etaxonomy.cdm.persistence.dao.jdbc.validation.EntityValidationCrudJdbcImpl#deleteEntityValidation(java.lang.String, int)}
+ * .
+ */
+ @Test
+ @DataSet
+ @ExpectedDataSet
+ public void test_DeleteValidationResult() {
+ EntityValidationCrudJdbcImpl dao = new EntityValidationCrudJdbcImpl(dataSource);
+ dao.deleteEntityValidation(SYNONYM_RELATIONSHIP, 200);
+ EntityValidation result = dao.getEntityValidation(SYNONYM_RELATIONSHIP, 200);
+ assertTrue(result == null);
+ }
+
+ @Test
+ @DataSet
+ public void testGetEntityValidation() {
+ EntityValidationCrudJdbcImpl dao = new EntityValidationCrudJdbcImpl(dataSource);
+ EntityValidation result;
+
+ result = dao.getEntityValidation(MEDIA, 100);
+ assertNotNull("A validation result for media id=100 should exist", result);
+ assertEquals("Unexpected entity id", 1, result.getId());
+ assertEquals("Unexpected number of constraint violations", 1, result.getEntityConstraintViolations().size());
+
+ result = dao.getEntityValidation(SYNONYM_RELATIONSHIP, 200);
+ assertNotNull(result);
+ assertEquals("Unexpected entity id", 2, result.getId());
+ assertEquals("Unexpected number of constraint violations", 2, result.getEntityConstraintViolations().size());
+
+ result = dao.getEntityValidation(GATHERING_EVENT, 300);
+ assertNotNull(result);
+ assertEquals("Unexpected entity id", 3, result.getId());
+ assertEquals("Unexpected number of constraint violations", 3, result.getEntityConstraintViolations().size());
+
+ result = dao.getEntityValidation(GATHERING_EVENT, 301);
+ assertNotNull(result);
+ assertEquals("Unexpected entity id", 4, result.getId());
+ assertEquals("Unexpected number of constraint violations", 1, result.getEntityConstraintViolations().size());
+
+ // Test we get a null back
+ result = dao.getEntityValidation("Foo Bar", 100);
+ assertNull(result);
+ }
+
+ /**
+ * Test method for
+     * {@link eu.etaxonomy.cdm.persistence.dao.jdbc.validation.EntityValidationCrudJdbcImpl#setDatasource(eu.etaxonomy.cdm.database.ICdmDataSource)}
+ * .
+ */
+ @Test
+ public void testSetDatasource() {
+ EntityValidationCrudJdbcImpl dao = new EntityValidationCrudJdbcImpl();
+ dao.setDatasource(dataSource);
+ }
+
+ @Override
+ public void createTestDataSet() throws FileNotFoundException {
+ }
+
+}
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.sql.SQLException;
import org.dbunit.dataset.xml.FlatXmlDataSet;
import org.dbunit.dataset.xml.FlatXmlWriter;
import org.dbunit.ext.h2.H2DataTypeFactory;
-import org.dbunit.operation.DatabaseOperation;
import org.h2.tools.Server;
import org.junit.Before;
import org.springframework.transaction.PlatformTransactionManager;
-import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.DefaultTransactionDefinition;
import org.unitils.UnitilsJUnit4;
import org.unitils.database.annotations.TestDataSource;
import org.unitils.dbunit.util.MultiSchemaXmlDataSetReader;
+import org.unitils.orm.hibernate.annotation.HibernateSessionFactory;
import org.unitils.spring.annotation.SpringApplicationContext;
import org.unitils.spring.annotation.SpringBeanByType;
* An internal h2 database application will be started and listens at port 8082.
* The port to listen on can be specified by passing a second argument, e.g.: <code>-Dh2Server 8083</code>.
*
- * <h2>Creating create DbUnit dataset files</h2>
+ * <h2>Creating DbUnit dataset files</h2>
* In order to create DbUnit datasets for integration tests it is highly recommended method to use the
* {@link #writeDbUnitDataSetFile(String[])} method.
*
- * From {@link http://www.unitils.org/tutorial-database.html}, by default every test is executed in a transaction,
- * which is committed at the end of the test. This can be disabled using @Transactional(TransactionMode.DISABLED)
+ * From <a href="http://www.unitils.org/tutorial-database.html">the Unitils database tutorial</a>: by default every test is executed in a
+ * transaction, which is committed at the end of the test. This can be disabled using
+ * {@code @Transactional(TransactionMode.DISABLED)}
*
* @see <a href="http://www.unitils.org">unitils home page</a>
*
* @author a.kohlbecker (2013)
*/
@SpringApplicationContext("file:./target/test-classes/eu/etaxonomy/cdm/applicationContext-test.xml")
+@HibernateSessionFactory("/eu/etaxonomy/cdm/hibernate.cfg.xml")
public abstract class CdmIntegrationTest extends UnitilsJUnit4 {
protected static final Logger logger = Logger.getLogger(CdmIntegrationTest.class);
logger.error(e);
} finally {
try {
- connection.close();
+ if (connection != null){
+ connection.close();
+ }
} catch (SQLException sqle) {
logger.error(sqle);
}
* @param excludeFilter the tables to be <em>excluded</em>
*/
public void printDataSetWithNull(OutputStream out, Boolean excludeTermLoadingTables,
- ITableFilterSimple excludeFilter, String[] includeTableNames) {
+ ITableFilterSimple excludeFilterOrig, String[] includeTableNames) {
+ ITableFilterSimple excludeFilter = excludeFilterOrig;
if(excludeTermLoadingTables != null && excludeTermLoadingTables.equals(true)){
ExcludeTableFilter excludeTableFilter = new ExcludeTableFilter();
logger.error("Error on writing dataset:", e);
} finally {
try {
- connection.close();
+ if (connection != null){
+ connection.close();
+ }
} catch (SQLException sqle) {
logger.error(sqle);
}
logger.error(ioe);
}
try {
- connection.close();
+ if (connection != null){
+ connection.close();
+ }
} catch (SQLException sqle) {
logger.error(sqle);
}
logger.error(e);
} finally {
try {
- connection.close();
+ if (connection != null){
+ connection.close();
+ }
} catch (SQLException sqle) {
logger.error(sqle);
}
logger.error(e);
} finally {
try {
- connection.close();
+ if (connection != null){
+ connection.close();
+ }
} catch (SQLException sqle) {
logger.error(sqle);
}
logger.error(e);
} finally {
try {
- connection.close();
+ if (connection != null){
+ connection.close();
+ }
} catch (SQLException sqle) {
logger.error(sqle);
}
* @return
* @throws TransformerException
*/
- public String transformSourceToString(Source source) throws TransformerException {
+ protected String transformSourceToString(Source source) throws TransformerException {
TransformerFactory transformerFactory = TransformerFactory.newInstance();
Transformer transformer = transformerFactory.newTransformer();
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
return new String(outputStream.toByteArray());
}
- @Deprecated // no longer used and for sure not needed at all
- protected void loadDataSet(InputStream dataset) {
- txDefinition.setName("CdmIntergartionTest.loadDataSet");
- TransactionStatus txStatus = transactionManager.getTransaction(txDefinition);
- IDatabaseConnection connection = null;
-
- try {
- connection = getConnection();
- IDataSet dataSet = new FlatXmlDataSet(new InputStreamReader(dataset));
-
- DatabaseOperation.CLEAN_INSERT.execute(connection, dataSet);
- } catch (Exception e) {
- logger.error(e);
- for(StackTraceElement ste : e.getStackTrace()) {
- logger.error(ste);
- }
- } finally {
- try {
- connection.close();
- } catch (SQLException sqle) {
- logger.error(sqle);
- }
- }
- transactionManager.commit(txStatus);
- }
/**
* This is the common method to create test data xml files for integration tests.
/**\r
- * \r
+ *\r
*/\r
package eu.etaxonomy.cdm.test.unitils;\r
\r
import java.io.IOException;\r
\r
import org.hibernate.HibernateException;\r
+import org.hibernate.boot.model.naming.PhysicalNamingStrategy;\r
+import org.hibernate.boot.model.naming.PhysicalNamingStrategyStandardImpl;\r
+import org.hibernate.cfg.AvailableSettings;\r
import org.hibernate.cfg.Configuration;\r
-import org.hibernate.cfg.DefaultComponentSafeNamingStrategy;\r
-import org.hibernate.cfg.Environment;\r
-import org.hibernate.cfg.NamingStrategy;\r
import org.hibernate.dialect.H2CorrectedDialect;\r
-import org.hibernate.envers.configuration.AuditConfiguration;\r
+import org.hibernate.envers.boot.internal.EnversService;\r
+import org.hibernate.envers.boot.internal.EnversServiceImpl;\r
import org.hibernate.tool.hbm2ddl.SchemaExport;\r
import org.springframework.core.io.ClassPathResource;\r
\r
/**\r
- * \r
+ *\r
* This class may help to create your DDL file.\r
* However, it does not support Auditing table yet as they are (maybe) not supported\r
- * by Hibernate 4 hbm2dll. \r
+ * by Hibernate 4 hbm2ddl.\r
* It is also unclear if the antrun plugin supports envers in hibernate 4. I wasn't successful with it.\r
* http://docs.jboss.org/hibernate/orm/4.2/devguide/en-US/html/ch15.html#envers-generateschema\r
- * \r
+ *\r
* Also the result needs to be changed to uppercase and some _uniquekey statements need to be replaced as they are not\r
* unique themselves.\r
- * \r
+ *\r
* The result is stored in a file "new-cdm.h2.sql" in the root directory and is written to the console.\r
- * \r
+ *\r
* @author a.mueller\r
*\r
*/\r
public class DdlCreator {\r
- \r
- public static void main(String[] args) { \r
+\r
+ public static void main(String[] args) {\r
try {\r
new DdlCreator().execute(H2CorrectedDialect.class, "h2");\r
} catch (Exception e) {\r
e.printStackTrace();\r
}\r
}\r
- \r
+\r
private void execute(Class<?> dialect, String lowerCaseDialectName, Class<?>... classes) throws IOException, HibernateException, InstantiationException, IllegalAccessException {\r
String classPath = "eu/etaxonomy/cdm/hibernate.cfg.xml";\r
ClassPathResource resource = new ClassPathResource(classPath);\r
File file = resource.getFile();\r
- \r
+\r
// File file = new File("C:\\Users\\pesiimport\\Documents\\cdm-3.3\\cdmlib-persistence\\src\\main\\resources\\eu\\etaxonomy\\cdm\\hibernate.cfg.xml");\r
System.out.println(file.exists());\r
- \r
+\r
Configuration config = new Configuration().addFile(file);\r
- config.setProperty(Environment.DIALECT, dialect.getCanonicalName());\r
- NamingStrategy namingStrategy = new DefaultComponentSafeNamingStrategy(); //; = new ImprovedNamingStrategy();\r
- config.setNamingStrategy(namingStrategy);\r
- \r
+ config.setProperty(AvailableSettings.DIALECT, dialect.getCanonicalName());\r
+// NamingStrategyDelegator;\r
+ PhysicalNamingStrategy namingStrategy = new PhysicalNamingStrategyStandardImpl();\r
+// new DefaultComponentSafeNamingStrategy(); //; = new ImprovedNamingStrategy();\r
+ config.setPhysicalNamingStrategy(namingStrategy);\r
+\r
config.configure(file);\r
// String[] schema = config.generateSchemaCreationScript((Dialect)dialect.newInstance());\r
// for (String s : schema){\r
// System.out.println(s);\r
// }\r
- \r
- AuditConfiguration.getFor(config);\r
+\r
+ //FIXME #4716\r
+ EnversService enversService = new EnversServiceImpl();\r
+// . .getFor(config.);\r
SchemaExport schemaExport = new SchemaExport(config);\r
schemaExport.setDelimiter(";");\r
schemaExport.drop(false, false);\r
boolean consolePrint = true;\r
boolean exportInDatabase = false;\r
schemaExport.create(consolePrint, exportInDatabase);\r
- \r
+\r
schemaExport.execute(consolePrint, exportInDatabase, false, true);\r
- \r
+\r
}\r
}\r
\r
--- /dev/null
+package org.hibernate.cfg;
+
+
+// This class is only kept as the unitils setup requires it, maybe because it is mentioned
+// in the unitils-core unitils-default.properties for parameter HibernateModule.configuration.implClassName
+
+//It can be removed once unitils doesn't need it anymore
+
+@Deprecated
+public class AnnotationConfiguration extends Configuration {
+
+}
--- /dev/null
+// $Id$
+/**
+* Copyright (C) 2015 EDIT
+* European Distributed Institute of Taxonomy
+* http://www.e-taxonomy.eu
+*
+* The contents of this file are subject to the Mozilla Public License Version 1.1
+* See LICENSE.TXT at the top of this package for the full license terms.
+*/
+package org.unitils.orm.hibernate;
+
+import org.hibernate.SessionFactory;
+import org.hibernate.cfg.Configuration;
+import org.unitils.orm.common.util.OrmConfig;
+import org.unitils.orm.common.util.OrmPersistenceUnitLoader;
+import org.unitils.orm.hibernate.util.Hibernate5SessionFactoryLoader;
+
+/**
+ * @author a.mueller
+ * @date 03.11.2015
+ *
+ */
+public class Hibernate5Module extends HibernateModule {
+
+ @Override
+ protected OrmPersistenceUnitLoader<SessionFactory, Configuration, OrmConfig> createOrmPersistenceUnitLoader() {
+ return new Hibernate5SessionFactoryLoader(databaseName);
+ }
+
+}
--- /dev/null
+/*
+ * Copyright 2008, Unitils.org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.unitils.orm.hibernate.util;
+
+import javax.sql.DataSource;
+
+import org.hibernate.SessionFactory;
+import org.hibernate.cfg.Configuration;
+import org.springframework.core.io.ClassPathResource;
+import org.springframework.core.io.Resource;
+import org.springframework.orm.hibernate5.LocalSessionFactoryBean;
+import org.unitils.core.Unitils;
+import org.unitils.core.UnitilsException;
+import org.unitils.database.DatabaseModule;
+import org.unitils.orm.common.util.ConfiguredOrmPersistenceUnit;
+import org.unitils.orm.common.util.OrmConfig;
+import org.unitils.orm.common.util.OrmPersistenceUnitLoader;
+import org.unitils.orm.hibernate.Hibernate5Module;
+
+
+//todo javadoc
+public class Hibernate5SessionFactoryLoader implements OrmPersistenceUnitLoader<SessionFactory, Configuration, OrmConfig> {
+
+ protected String databaseName;
+ /**
+ * @param databaseName
+ */
+ public Hibernate5SessionFactoryLoader(String databaseName) {
+ this.databaseName = databaseName;
+ }
+
+
+ @Override
+ public ConfiguredOrmPersistenceUnit<SessionFactory, Configuration> getConfiguredOrmPersistenceUnit(Object testObject, OrmConfig entityManagerConfig) {
+ LocalSessionFactoryBean factoryBean = createSessionFactoryBean(testObject, entityManagerConfig);
+ SessionFactory entityManagerFactory = factoryBean.getObject();
+ Configuration hibernateConfiguration = factoryBean.getConfiguration();
+ return new ConfiguredOrmPersistenceUnit<SessionFactory, Configuration>(entityManagerFactory, hibernateConfiguration);
+ }
+
+
+ protected LocalSessionFactoryBean createSessionFactoryBean(Object testObject, OrmConfig entityManagerConfig) {
+ // A custom subclass of spring's LocalSessionFactoryBean is used, to enable calling a custom config method
+ UnitilsHibernate5LocalSessionFactoryBean factoryBean = new UnitilsHibernate5LocalSessionFactoryBean();
+ factoryBean.setDataSource(getDataSource());
+// factoryBean.set .setConfigurationClass(getConfigurationObjectClass());
+ Resource[] hibernateConfigFiles = new Resource[entityManagerConfig.getConfigFiles().size()];
+ int index = 0;
+ for (String configFileName : entityManagerConfig.getConfigFiles()) {
+ hibernateConfigFiles[index++] = new ClassPathResource(configFileName);
+ }
+ factoryBean.setConfigLocations(hibernateConfigFiles);
+
+ // Enable invocation of custom config method
+ factoryBean.setTestObject(testObject);
+ factoryBean.setCustomConfigMethod(entityManagerConfig.getConfigMethod());
+
+ // Build SessionFactory
+ try {
+ factoryBean.afterPropertiesSet();
+ } catch (Exception e) {
+ throw new UnitilsException("Error while processing " + LocalSessionFactoryBean.class.getSimpleName() + " configuration", e);
+ }
+
+ return factoryBean;
+ }
+
+
+ protected Class<? extends Configuration> getConfigurationObjectClass() {
+ return getHibernateModule().getConfigurationObjectClass();
+ }
+
+
+ protected DataSource getDataSource() {
+ return getDatabaseModule().getWrapper(databaseName).getDataSourceAndActivateTransactionIfNeeded();
+ }
+
+
+ protected DatabaseModule getDatabaseModule() {
+ return Unitils.getInstance().getModulesRepository().getModuleOfType(DatabaseModule.class);
+ }
+
+
+ protected Hibernate5Module getHibernateModule() {
+ return Unitils.getInstance().getModulesRepository().getModuleOfType(Hibernate5Module.class);
+ }
+}
--- /dev/null
+/*
+ * Copyright 2008, Unitils.org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.unitils.orm.hibernate.util;
+
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+
+import org.hibernate.HibernateException;
+import org.hibernate.boot.model.naming.ImplicitNamingStrategyComponentPathImpl;
+import org.hibernate.cfg.Configuration;
+import org.springframework.orm.hibernate5.LocalSessionFactoryBean;
+import org.unitils.core.UnitilsException;
+import org.unitils.util.ReflectionUtils;
+
+public class UnitilsHibernate5LocalSessionFactoryBean extends LocalSessionFactoryBean {
+
+ private Object testObject;
+
+ private Method customConfigMethod;
+
+// ******************* CONSTRUCTOR *********************************/
+
+ /**
+ */
+ public UnitilsHibernate5LocalSessionFactoryBean() {
+ //FIXME configure naming strategy via real configuration
+ this.setImplicitNamingStrategy(new ImplicitNamingStrategyComponentPathImpl());
+ }
+
+//****************************** SETTER **********************************/
+
+ public void setTestObject(Object testObject) {
+ this.testObject = testObject;
+ }
+
+ public void setCustomConfigMethod(Method customConfigMethod) {
+ this.customConfigMethod = customConfigMethod;
+ }
+
+//*************************** METHOD *******************************/
+
+// @Override //Not used, kept to remind that this method existed in hibernate3
+ protected void postProcessConfiguration(Configuration config) throws HibernateException {
+ if (customConfigMethod != null) {
+ try {
+ ReflectionUtils.invokeMethod(testObject, customConfigMethod, config);
+ } catch (InvocationTargetException e) {
+ throw new UnitilsException("Error while invoking custom config method", e.getCause());
+ }
+ }
+ }
+
+
+}
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<!-- NOTE: It looks like this file is currently not in use. a.mueller (2013-06-28) -->
-<persistence xmlns="http://java.sun.com/xml/ns/persistence" version="1.0">
- <persistence-unit name="cdm">
- <provider>org.hibernate.ejb.HibernatePersistence</provider>
- <!-- Annotation Package -->
- <class>eu.etaxonomy.cdm.model.agent.Address</class>
- <class>eu.etaxonomy.cdm.model.agent.AgentBase</class>
- <class>eu.etaxonomy.cdm.model.agent.Contact</class>
- <class>eu.etaxonomy.cdm.model.agent.Institution</class>
- <class>eu.etaxonomy.cdm.model.agent.InstitutionalMembership</class>
- <class>eu.etaxonomy.cdm.model.agent.Person</class>
- <class>eu.etaxonomy.cdm.model.agent.Team</class>
- <class>eu.etaxonomy.cdm.model.agent.TeamOrPersonBase</class>
- <!-- Common Package -->
- <class>eu.etaxonomy.cdm.model.common.Annotation</class>
- <class>eu.etaxonomy.cdm.model.common.AnnotationType</class>
- <class>eu.etaxonomy.cdm.model.common.Credit</class>
- <class>eu.etaxonomy.cdm.model.common.DefinedTerm</class>
- <class>eu.etaxonomy.cdm.model.common.DefinedTermBase</class>
- <class>eu.etaxonomy.cdm.model.description.DescriptionElementSource</class>
- <class>eu.etaxonomy.cdm.model.common.Extension</class>
- <class>eu.etaxonomy.cdm.model.common.ExtensionType</class>
- <class>eu.etaxonomy.cdm.model.common.GrantedAuthorityImpl</class>
- <class>eu.etaxonomy.cdm.model.common.Group</class>
- <class>eu.etaxonomy.cdm.model.common.IdentifiableSource</class>
- <class>eu.etaxonomy.cdm.model.common.Language</class>
- <class>eu.etaxonomy.cdm.model.common.LanguageString</class>
- <class>eu.etaxonomy.cdm.model.common.LSID</class>
- <class>eu.etaxonomy.cdm.model.common.LSIDAuthority</class>
- <class>eu.etaxonomy.cdm.model.common.Marker</class>
- <class>eu.etaxonomy.cdm.model.common.MarkerType</class>
- <class>eu.etaxonomy.cdm.model.common.OrderedTermBase</class>
- <class>eu.etaxonomy.cdm.model.common.OrderedTermVocabulary</class>
- <class>eu.etaxonomy.cdm.model.common.RelationshipTermBase</class>
- <class>eu.etaxonomy.cdm.model.common.Representation</class>
- <class>eu.etaxonomy.cdm.model.common.TermVocabulary</class>
- <class>eu.etaxonomy.cdm.model.common.User</class>
- <!-- Description Package -->
- <class>eu.etaxonomy.cdm.model.description.AbsenceTerm</class>
- <class>eu.etaxonomy.cdm.model.description.CategoricalData</class>
- <class>eu.etaxonomy.cdm.model.description.CommonTaxonName</class>
- <class>eu.etaxonomy.cdm.model.description.DescriptionBase</class>
- <class>eu.etaxonomy.cdm.model.description.DescriptionElementBase</class>
- <class>eu.etaxonomy.cdm.model.description.Distribution</class>
- <class>eu.etaxonomy.cdm.model.description.Feature</class>
- <class>eu.etaxonomy.cdm.model.description.FeatureNode</class>
- <class>eu.etaxonomy.cdm.model.description.FeatureTree</class>
- <class>eu.etaxonomy.cdm.model.description.MediaKey</class>
- <class>eu.etaxonomy.cdm.model.description.IndividualsAssociation</class>
- <class>eu.etaxonomy.cdm.model.description.MeasurementUnit</class>
- <class>eu.etaxonomy.cdm.model.description.MultiAccessKey</class>
- <class>eu.etaxonomy.cdm.model.description.PolytomousKey</class>
- <class>eu.etaxonomy.cdm.model.description.PresenceAbsenceTermBase</class>
- <class>eu.etaxonomy.cdm.model.description.PresenceTerm</class>
- <class>eu.etaxonomy.cdm.model.description.QuantitativeData</class>
- <class>eu.etaxonomy.cdm.model.description.SpecimenDescription</class>
- <class>eu.etaxonomy.cdm.model.description.State</class>
- <class>eu.etaxonomy.cdm.model.description.StateData</class>
- <class>eu.etaxonomy.cdm.model.description.StatisticalMeasure</class>
- <class>eu.etaxonomy.cdm.model.description.StatisticalMeasurementValue</class>
- <class>eu.etaxonomy.cdm.model.description.TaxonDescription</class>
- <class>eu.etaxonomy.cdm.model.description.TaxonInteraction</class>
- <class>eu.etaxonomy.cdm.model.description.TaxonNameDescription</class>
- <class>eu.etaxonomy.cdm.model.description.TextData</class>
- <class>eu.etaxonomy.cdm.model.description.TextFormat</class>
- <class>eu.etaxonomy.cdm.model.description.WorkingSet</class>
- <!-- Location Package -->
- <class>eu.etaxonomy.cdm.model.location.Continent</class>
- <class>eu.etaxonomy.cdm.model.location.NamedArea</class>
- <class>eu.etaxonomy.cdm.model.location.NamedAreaLevel</class>
- <class>eu.etaxonomy.cdm.model.location.NamedAreaType</class>
- <class>eu.etaxonomy.cdm.model.location.ReferenceSystem</class>
- <class>eu.etaxonomy.cdm.model.location.Point</class>
- <class>eu.etaxonomy.cdm.model.location.Country</class>
- <!-- Media Package -->
- <class>eu.etaxonomy.cdm.model.media.AudioFile</class>
- <class>eu.etaxonomy.cdm.model.media.ImageFile</class>
- <class>eu.etaxonomy.cdm.model.media.Media</class>
- <class>eu.etaxonomy.cdm.model.media.MediaRepresentation</class>
- <class>eu.etaxonomy.cdm.model.media.MediaRepresentationPart</class>
- <class>eu.etaxonomy.cdm.model.media.MovieFile</class>
- <class>eu.etaxonomy.cdm.model.media.ReferencedMedia</class>
- <class>eu.etaxonomy.cdm.model.media.Rights</class>
- <class>eu.etaxonomy.cdm.model.media.RightsType</class>
- <!-- Molecular Package -->
- <class>eu.etaxonomy.cdm.model.molecular.DnaSample"</class>
- <class>eu.etaxonomy.cdm.model.molecular.Amplification"</class>
- <class>eu.etaxonomy.cdm.model.molecular.SingleRead"</class>
- <class>eu.etaxonomy.cdm.model.molecular.Sequencing"</class>
- <class>eu.etaxonomy.cdm.model.molecular.Primer"</class>
- <class>eu.etaxonomy.cdm.model.molecular.PhylogeneticTree"</class>
- <class>eu.etaxonomy.cdm.model.molecular.Sequence"</class>
- <!-- Name Package -->
- <class>eu.etaxonomy.cdm.model.name.BacterialName</class>
- <class>eu.etaxonomy.cdm.model.name.BotanicalName</class>
- <class>eu.etaxonomy.cdm.model.name.CultivarPlantName</class>
- <class>eu.etaxonomy.cdm.model.name.HomotypicalGroup</class>
- <class>eu.etaxonomy.cdm.model.name.HybridRelationship</class>
- <class>eu.etaxonomy.cdm.model.name.HybridRelationshipType</class>
- <class>eu.etaxonomy.cdm.model.name.NameRelationship</class>
- <class>eu.etaxonomy.cdm.model.name.NameRelationshipType</class>
- <class>eu.etaxonomy.cdm.model.name.NameTypeDesignation</class>
- <class>eu.etaxonomy.cdm.model.name.NameTypeDesignationStatus</class>
- <class>eu.etaxonomy.cdm.model.name.NomenclaturalCode</class>
- <class>eu.etaxonomy.cdm.model.name.NomenclaturalStatus</class>
- <class>eu.etaxonomy.cdm.model.name.NomenclaturalStatusType</class>
- <class>eu.etaxonomy.cdm.model.name.NonViralName</class>
- <class>eu.etaxonomy.cdm.model.name.Rank</class>
- <class>eu.etaxonomy.cdm.model.name.SpecimenTypeDesignation</class>
- <class>eu.etaxonomy.cdm.model.name.SpecimenTypeDesignationStatus</class>
- <class>eu.etaxonomy.cdm.model.name.TaxonNameBase</class>
- <class>eu.etaxonomy.cdm.model.name.TypeDesignationBase</class>
- <class>eu.etaxonomy.cdm.model.name.ViralName</class>
- <class>eu.etaxonomy.cdm.model.name.ZoologicalName</class>
- <!-- Occurence Package -->
- <class>eu.etaxonomy.cdm.model.occurrence.Collection</class>
- <class>eu.etaxonomy.cdm.model.occurrence.DerivationEvent</class>
- <class>eu.etaxonomy.cdm.model.occurrence.DerivationEventType</class>
- <class>eu.etaxonomy.cdm.model.occurrence.DerivedUnit</class>
- <class>eu.etaxonomy.cdm.model.occurrence.DeterminationEvent</class>
- <class>eu.etaxonomy.cdm.model.occurrence.FieldUnit</class>
- <class>eu.etaxonomy.cdm.model.occurrence.GatheringEvent</class>
- <class>eu.etaxonomy.cdm.model.occurrence.PreservationMethod</class>
- <class>eu.etaxonomy.cdm.model.occurrence.SpecimenOrObservationBase</class>
- <!-- Reference Package -->
- <class>eu.etaxonomy.cdm.model.reference.Reference</class>
- <!-- Taxon Package -->
- <class>eu.etaxonomy.cdm.model.taxon.Synonym</class>
- <class>eu.etaxonomy.cdm.model.taxon.SynonymRelationship</class>
- <class>eu.etaxonomy.cdm.model.taxon.SynonymRelationshipType</class>
- <class>eu.etaxonomy.cdm.model.taxon.Taxon</class>
- <class>eu.etaxonomy.cdm.model.taxon.TaxonBase</class>
- <class>eu.etaxonomy.cdm.model.taxon.TaxonNode</class>
- <class>eu.etaxonomy.cdm.model.taxon.TaxonRelationship</class>
- <class>eu.etaxonomy.cdm.model.taxon.TaxonRelationshipType</class>
- <class>eu.etaxonomy.cdm.model.taxon.Classification</class>
- <!-- View Package -->
- <class>eu.etaxonomy.cdm.model.view.View</class>
- <class>eu.etaxonomy.cdm.model.view.AuditEvent</class>
- <class>eu.etaxonomy.cdm.model.common</class>
- <!-- Validation Package -->
- <class>eu.etaxonomy.cdm.model.validation.EntityValidation</class>
- <class>eu.etaxonomy.cdm.model.validation.EntityConstraintViolation</class>
-
-
- <exclude-unlisted-classes />
- <!-- <properties>
- <property name="hibernate.dialect" value="org.hibernate.dialect.HSQLDialect"/>
- <property name="hibernate.connection.url" value="jdbc:hsqldb:mem:cdm"/>
- <property name="hibernate.connection.driver_class" value="org.hsqldb.jdbcDriver"/>
- <property name="hibernate.connection.username" value="sa"/>
- <property name="hibernate.connection.password" value=""/>
- <property name="hibernate.hbm2ddl.auto" value="create-drop"/>
- <property name="hibernate.ejb.naming_strategy" value="org.hibernate.cfg.DefaultComponentSafeNamingStrategy"/>
- </properties> -->
- </persistence-unit>
-</persistence>
\ No newline at end of file
<prop key="hibernate.format_sql">false</prop>
<prop key="hibernate.search.default.directory_provider">org.hibernate.search.store.impl.FSDirectoryProvider</prop>
<prop key="hibernate.search.default.indexBase">./target/index</prop>
-
- <!-- temp for h4 migration testing
- <prop key="hibernate.connection.driver_class">org.h2.Driver</prop>
- <prop key="hibernate.connection.url">jdbc:h2:mem:cdm</prop>
- <prop key="hibernate.connection.username">sa</prop>
- <prop key="hibernate.connection.password"></prop>
- <prop key="hibernate.search.lucene_version">LUCENE_36</prop>
- end temp for h4 migration testing
- -->
+ <prop key="hibernate.dialect">org.hibernate.dialect.H2CorrectedDialect</prop>
+ <prop key="implicitNamingStrategy">org.hibernate.boot.model.naming.ImplicitNamingStrategyComponentPathImpl</prop>
+
</props>
</property>
</bean>
http://www.springframework.org/schema/tx
http://www.springframework.org/schema/tx/spring-tx-4.0.xsd">
- <!-- requires at least spring 2.5.4 <context:property-override location="classpath:eu/etaxonomy/cdm/persistence/override.properties"/>-->
+ <!-- requires at least spring 2.5.4
+ <context:property-override location="classpath:eu/etaxonomy/cdm/persistence/override.properties"/>
+ -->
<bean id="dataSource" class="org.unitils.database.UnitilsDataSourceFactoryBean"/>
-<?xml version='1.0' encoding='UTF-8'?>\r
+<?xml version='1.0' encoding='UTF-8'?>
<dataset xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="../dataset.xsd">
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="1" CREATED="2008-12-10 09:56:07.0" UUID="5f3265ed-68ad-4ec3-826f-0d29d25986b9" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE=" sec. ???" IMAGEGALLERY="false" TAXON_ID="1"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="2" CREATED="2008-12-10 09:56:07.0" UUID="d69d6d40-eb98-42f9-8d30-fbeb0a7db33b" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE=" sec. ???" IMAGEGALLERY="false" TAXON_ID="2"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="3" CREATED="2008-12-10 09:56:07.0" UUID="c6782124-7cf0-4454-880f-c4f16dc03105" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Sphingidae Linnaeus, 1758 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="3"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="4" CREATED="2008-12-10 09:56:07.0" UUID="68ff9cb2-3fc0-4580-8295-bbb0de0d42bf" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Lathoe Fabricius, 1807 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="4"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="5" CREATED="2008-12-10 09:56:07.0" UUID="1240e8ba-32a2-4914-ad00-2f87e18ab65e" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Smerinthus Latreille, 1802 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="5"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="6" CREATED="2008-12-10 09:56:07.0" UUID="5c4bef76-72a6-4e39-a6d8-1d8707519d7a" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Manduca Hübner, 1807 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="6"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="7" CREATED="2008-12-10 09:56:07.0" UUID="40dadf87-4a1c-4852-afb4-6787a4e75854" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Lepchina Oberthür, 1904 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="7"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="8" CREATED="2008-12-10 09:56:07.0" UUID="1f02a155-3669-464b-99a9-4c36307549ce" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Smerinthus kindermannii Lederer, 1853 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="8"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="9" CREATED="2008-12-10 09:56:07.0" UUID="b58fb240-01ac-4a68-99f7-1ccb5576d663" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Mimas Hübner, 1819 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="9"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="10" CREATED="2008-12-10 09:56:07.0" UUID="5b99a457-e776-41a8-b18b-58c5fb0b7b60" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Callambulyx Rothschild & Jordan, 1903 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="10"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="11" CREATED="2008-12-10 09:56:07.0" UUID="3316a5a7-56f5-4e12-b6ed-c1758e0d2065" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Dolbina Staudinger, 1877 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="11"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="12" CREATED="2008-12-10 09:56:07.0" UUID="0e9bf65a-1322-408c-bf9e-01ea6834f191" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Akbesia Rothschild & Jordan, 1903 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="12"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="13" CREATED="2008-12-10 09:56:07.0" UUID="1f17b094-77a6-4f0b-b687-f6cd0b3f5cb8" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Sphinx Linnaeus, 1758 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="13"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="14" CREATED="2008-12-10 09:56:07.0" UUID="eb591ebe-bec2-4639-b449-c3fd7f5e129c" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Agrius Hübner, 1819 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="14"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="15" CREATED="2008-12-10 09:56:07.0" UUID="f2a093ed-7d09-45ea-abca-366a299b19d3" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Acherontia Laspeyres, 1809 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="15"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="16" CREATED="2008-12-10 09:56:07.0" UUID="f6367b5a-6466-4cf2-8eb0-23a558514914" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Hemaris Dalman, 1816 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="16"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="17" CREATED="2008-12-10 09:56:07.0" UUID="fd9fe4e0-5137-483c-a8ee-c27d2f45d308" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Proserpinus Hübner, 1819 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="17"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="18" CREATED="2008-12-10 09:56:07.0" UUID="5bb4bd8f-9d96-4535-8a7b-3895bc229fd0" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Sphingonaepiopsis Wallengren, 1858 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="18"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="19" CREATED="2008-12-10 09:56:07.0" UUID="61bad9ae-900d-40e2-90f1-cd516f68a7aa" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Rethera Rothschild & Jordan, 1903 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="19"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="20" CREATED="2008-12-10 09:56:07.0" UUID="8ed18615-f9be-4c3c-871a-7d2371b49869" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Daphnis Hübner, 1819 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="20"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="21" CREATED="2008-12-10 09:56:07.0" UUID="4e2b5eca-db5f-41ac-a4e3-e936cae658da" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Clarina Tutt, 1903 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="21"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="22" CREATED="2008-12-10 09:56:07.0" UUID="a96cdfdb-7f9d-44b7-8c94-d68bc18850d5" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Acosmeryx Boisduval, 1875 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="22"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="23" CREATED="2008-12-10 09:56:07.0" UUID="7c79ee94-fc7f-4437-b5aa-b27a4dd482ac" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Macroglossum Scopoli, 1777 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="23"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="24" CREATED="2008-12-10 09:56:07.0" UUID="03a54c6d-efb8-48c3-8672-e01880bcd5d7" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Hyles Hübner, 1819 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="24"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="25" CREATED="2008-12-10 09:56:07.0" UUID="e7243cb3-625b-4104-bb46-b92d48351d76" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Deilephila Laspeyres, 1809 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="25"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="26" CREATED="2008-12-10 09:56:07.0" UUID="0d7bf5f6-9fef-482e-b6f8-7e9a9c42c744" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Hippotion Hübner, 1819 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="26"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="27" CREATED="2008-12-10 09:56:07.0" UUID="cfa7ca5e-3ed1-4eda-93ab-b3c5e5fc3661" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Manduca afflicta (Grote, 1865) sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="32"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="28" CREATED="2008-12-10 09:56:07.0" UUID="7129d981-7c92-4bef-9afe-a0d6e21a2e4e" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Manduca bergarmatipes (Clark, 1927) sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="33"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="29" CREATED="2008-12-10 09:56:07.0" UUID="ef66e9ec-b9a4-435e-98ed-e3e72a86e9a4" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Manduca chinchilla (Gehlen, 1942) sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="34"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="30" CREATED="2008-12-10 09:56:07.0" UUID="6ef3bcac-7192-4ee0-8c1b-57e0b3d49c5c" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Acherontia atropos (Linnaeus, 1758) sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="35"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="31" CREATED="2008-12-10 09:56:07.0" UUID="fd6cdb64-142c-4df1-b366-c5e76f08a1fc" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Acherontia lachesis (Fabricius, 1798) sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="36"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="32" CREATED="2008-12-10 09:56:07.0" UUID="6d647bbe-5d91-46be-87f5-1781d9d5842c" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Acherontia styx Westwood, 1847 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="37"/>\r
- <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="33" CREATED="2008-12-10 09:56:07.0" UUID="620de7a8-8c83-42c9-add7-fdc55ebf943a" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Cryptocoryne x purpurea nothovar borneoensis N.Jacobsen, Bastm. & Yuji Sasaki sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="38"/>\r
- <DESCRIPTIONBASE DTYPE="SpecimenDescription" ID="34" CREATED="2013-06-26 18:26:26.0" UUID="7c45d218-b2c9-4298-9abd-b86e60f4fef8" UPDATED="2013-06-26 18:26:26.0" PROTECTEDTITLECACHE="true" TITLECACHE="specimendescription 1" IMAGEGALLERY="false" TAXON_ID="[null]"/>\r
-\r
- <DESCRIPTIONBASE_MARKER DESCRIPTIONBASE_ID="31" MARKERS_ID="1"/>\r
- <DESCRIPTIONBASE_MARKER DESCRIPTIONBASE_ID="31" MARKERS_ID="2"/>\r
- <MARKER ID="1" UUID="a0b943f6-3737-4ba4-9d5c-72f3f1476996" FLAG="TRUE" MARKEDOBJ_TYPE="eu.etaxonomy.cdm.model.description.TaxonDescription" MARKEDOBJ_ID="31" MARKERTYPE_ID="890"/>\r
- <MARKER ID="2" UUID="e873c908-ec5b-4edf-8e80-11da5a9d26b3" FLAG="FALSE" MARKEDOBJ_TYPE="eu.etaxonomy.cdm.model.description.TaxonDescription" MARKEDOBJ_ID="31" MARKERTYPE_ID="892"/>\r
-\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="1" INDESCRIPTION_ID="1" CREATED="2008-12-10 09:56:07.0" UUID="40458e70-a065-450f-b27d-adf61cc28a7f" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1969" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="2" INDESCRIPTION_ID="2" CREATED="2008-12-10 09:56:07.0" UUID="d4099b8d-0644-4025-8a56-e7fc2d95004e" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1969" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="3" INDESCRIPTION_ID="3" CREATED="2008-12-10 09:56:07.0" UUID="317fafca-3722-4d8d-8c4f-701d4f5b911d" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1969" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="4" INDESCRIPTION_ID="4" CREATED="2008-12-10 09:56:07.0" UUID="452b7bbc-cdb3-4315-9a3d-5be293fb85ee" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1969" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="5" INDESCRIPTION_ID="5" CREATED="2008-12-10 09:56:07.0" UUID="db46d7eb-e8df-4a42-bcc1-d6b4f6baf2f9" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1969" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="6" INDESCRIPTION_ID="6" CREATED="2008-12-10 09:56:07.0" UUID="51f4771f-b01d-4e3f-a5de-87c6f7b0e2f2" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1969" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="7" INDESCRIPTION_ID="7" CREATED="2008-12-10 09:56:07.0" UUID="a453ae39-fbb9-494d-81e1-dc9e1a305e01" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1969" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="8" INDESCRIPTION_ID="8" CREATED="2008-12-10 09:56:07.0" UUID="11f15758-fa8d-4dae-91b9-e5b475481890" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1969" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="9" INDESCRIPTION_ID="9" CREATED="2008-12-10 09:56:07.0" UUID="bbe158e3-89d6-443f-a4e5-7ef9e790b6ff" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1969" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="10" INDESCRIPTION_ID="10" CREATED="2008-12-10 09:56:07.0" UUID="160612a7-85b6-4b3f-a892-f8d6066de37a" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1970" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="11" INDESCRIPTION_ID="11" CREATED="2008-12-10 09:56:07.0" UUID="06ecc14e-6c65-44ed-a3f4-91c27298b50c" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1970" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="12" INDESCRIPTION_ID="12" CREATED="2008-12-10 09:56:07.0" UUID="f6ba0d83-bdca-470d-a793-435981ba08cf" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1970" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="13" INDESCRIPTION_ID="13" CREATED="2008-12-10 09:56:07.0" UUID="4f02b16e-7711-4e74-af56-e3f4d755dc18" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1970" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="14" INDESCRIPTION_ID="14" CREATED="2008-12-10 09:56:07.0" UUID="173fa31b-47d4-461f-8198-84ffbf67df14" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1970" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="15" INDESCRIPTION_ID="15" CREATED="2008-12-10 09:56:07.0" UUID="aa079c74-ea22-4d8d-98e3-95da8ba79fa6" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1970" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="16" INDESCRIPTION_ID="16" CREATED="2008-12-10 09:56:07.0" UUID="bb1422a1-8b54-4b51-9dcf-3644c68d2111" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1970" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="17" INDESCRIPTION_ID="17" CREATED="2008-12-10 09:56:07.0" UUID="2a65248c-93cf-4113-b149-b890629ccefb" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1970" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="18" INDESCRIPTION_ID="18" CREATED="2008-12-10 09:56:07.0" UUID="4d8eb1b4-0cb4-4880-9f45-471f5f22f34a" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1970" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="19" INDESCRIPTION_ID="19" CREATED="2008-12-10 09:56:07.0" UUID="c47cd92c-0fac-4ed2-83f1-27f83fdb4657" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1970" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="20" INDESCRIPTION_ID="20" CREATED="2008-12-10 09:56:07.0" UUID="eb89b640-16f3-43fa-add1-15d0198b3274" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1970" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="21" INDESCRIPTION_ID="2" CREATED="2008-12-10 09:56:07.0" UUID="83687081-25c8-4493-9472-a48f973fd8f6" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1968" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="22" INDESCRIPTION_ID="4" CREATED="2008-12-10 09:56:07.0" UUID="35174e33-0a19-4d25-8a92-af46e9720120" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1968" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="23" INDESCRIPTION_ID="6" CREATED="2008-12-10 09:56:07.0" UUID="ee5163e6-ce9f-4465-82cd-36f33b857af9" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1968" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="24" INDESCRIPTION_ID="8" CREATED="2008-12-10 09:56:07.0" UUID="54f31cbb-4447-456e-86cd-490848589173" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1968" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="25" INDESCRIPTION_ID="10" CREATED="2008-12-10 09:56:07.0" UUID="b7ed185d-a5df-4d2d-939f-c3fcb0bead69" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1968" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="26" INDESCRIPTION_ID="12" CREATED="2008-12-10 09:56:07.0" UUID="4e077042-d34d-4975-a042-01abdb0631bd" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1968" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="27" INDESCRIPTION_ID="14" CREATED="2008-12-10 09:56:07.0" UUID="a6e78e58-2a8a-4071-8a33-e9c182f72efc" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1968" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="28" INDESCRIPTION_ID="16" CREATED="2008-12-10 09:56:07.0" UUID="8854b227-5bc2-4076-a35c-d14468e5961d" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1968" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="29" INDESCRIPTION_ID="18" CREATED="2008-12-10 09:56:07.0" UUID="c44a489d-ecd2-4c61-9e63-22eb4e9de14e" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1968" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="30" INDESCRIPTION_ID="20" CREATED="2008-12-10 09:56:07.0" UUID="9cfbd03a-4cd7-42fb-86ed-d469b5126caf" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1968" STATUS_ID="1994"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="31" INDESCRIPTION_ID="31" CREATED="2008-12-10 09:56:07.0" UUID="cc31333c-90b6-4927-9597-1820755ffab3" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1970" STATUS_ID="1995"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="32" INDESCRIPTION_ID="32" CREATED="2008-12-10 09:56:07.0" UUID="5e49861f-94bc-45c1-a9b4-be9513b91dd9" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1970" STATUS_ID="1995"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="33" INDESCRIPTION_ID="33" CREATED="2008-12-10 09:56:07.0" UUID="cf4b976f-de07-4e71-ab70-4585062c6ed5" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1970" STATUS_ID="1995"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="TextData" ID="34" INDESCRIPTION_ID="1" CREATED="2008-12-10 09:56:07.0" UUID="31a0160a-51b2-4565-85cf-2be58cb561d6" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="922" AREA_ID="[null]" STATUS_ID="[null]"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="TextData" ID="35" INDESCRIPTION_ID="1" CREATED="2008-12-10 09:56:07.0" UUID="50f6b799-3585-40a7-b69d-e7be77b2651a" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="936" AREA_ID="[null]" STATUS_ID="[null]"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="TextData" ID="36" INDESCRIPTION_ID="2" CREATED="2008-12-10 09:56:07.0" UUID="c1e21ed1-4925-4e85-845f-e4b7a8386a33" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="936" AREA_ID="[null]" STATUS_ID="[null]"/>\r
- <DESCRIPTIONELEMENTBASE DTYPE="TextData" ID="37" INDESCRIPTION_ID="34" CREATED="2013-06-26 09:56:07.0" UUID="ab7d872a-0793-40b1-ae65-1c574dc09fc4" UPDATED="2013-06-26 09:56:07.253" FEATURE_ID="922" AREA_ID="[null]" STATUS_ID="[null]"/>\r
- <DESCRIPTIONELEMENTBASE_LANGUAGESTRING DESCRIPTIONELEMENTBASE_ID="34" MULTILANGUAGETEXT_ID="1" MULTILANGUAGETEXT_MAPKEY_ID="406"/>\r
- <DESCRIPTIONELEMENTBASE_LANGUAGESTRING DESCRIPTIONELEMENTBASE_ID="35" MULTILANGUAGETEXT_ID="2" MULTILANGUAGETEXT_MAPKEY_ID="406"/>\r
- <DESCRIPTIONELEMENTBASE_LANGUAGESTRING DESCRIPTIONELEMENTBASE_ID="36" MULTILANGUAGETEXT_ID="3" MULTILANGUAGETEXT_MAPKEY_ID="406"/>\r
- <DESCRIPTIONELEMENTBASE_LANGUAGESTRING DESCRIPTIONELEMENTBASE_ID="37" MULTILANGUAGETEXT_ID="4" MULTILANGUAGETEXT_MAPKEY_ID="406"/>\r
- <DESCRIPTIONELEMENTBASE_ORIGINALSOURCEBASE DESCRIPTIONELEMENTBASE_ID="34" SOURCES_ID="1"/>\r
- <HOMOTYPICALGROUP ID="1" CREATED="2008-12-10 09:56:07.0" UUID="7b214eb9-a6ac-48e5-af02-bbea634d2a03" UPDATED="2008-12-10 09:56:07.238"/>\r
- <HOMOTYPICALGROUP ID="2" CREATED="2008-12-10 09:56:07.0" UUID="6c241a4c-e5a0-4344-8e5e-a81f17b75973" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="3" CREATED="2008-12-10 09:56:07.0" UUID="76eac2b8-9c5a-4b25-acd1-e4e0d894106f" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="4" CREATED="2008-12-10 09:56:07.0" UUID="3c6ff240-9cab-4ec9-b47e-97280318ab30" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="5" CREATED="2008-12-10 09:56:07.0" UUID="5cd73df5-1c72-44a6-9864-adb145d8bd56" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="6" CREATED="2008-12-10 09:56:07.0" UUID="335977f0-ef55-4294-b78b-aed47435b428" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="7" CREATED="2008-12-10 09:56:07.0" UUID="e902a44e-7b26-4dc5-8251-d62c48d01bad" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="8" CREATED="2008-12-10 09:56:07.0" UUID="4806e853-d7c3-4216-8fa1-022be728bd3c" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="9" CREATED="2008-12-10 09:56:07.0" UUID="c454bd99-c7f3-43d6-8846-5fcc24f0c31b" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="10" CREATED="2008-12-10 09:56:07.0" UUID="4a92945c-e198-4f59-a19c-717e0b83e9f0" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="11" CREATED="2008-12-10 09:56:07.0" UUID="c5e7f225-60c2-4001-9488-0f584ba522ea" UPDATED="2008-12-10 09:56:07.238"/>\r
- <HOMOTYPICALGROUP ID="12" CREATED="2008-12-10 09:56:07.0" UUID="a4f0abc0-f6c3-42f0-98e9-10aec07415f2" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="13" CREATED="2008-12-10 09:56:07.0" UUID="9eb6d8c4-4b74-481f-9063-6251843606a3" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="14" CREATED="2008-12-10 09:56:07.0" UUID="2addf98d-ab27-4b26-ae1b-06fda059cddd" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="15" CREATED="2008-12-10 09:56:07.0" UUID="399af850-b662-4c3d-9038-ea5219af49de" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="16" CREATED="2008-12-10 09:56:07.0" UUID="ffb3e841-ea6a-4107-8dc7-7ee52f7ae500" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="17" CREATED="2008-12-10 09:56:07.0" UUID="de608141-143c-4337-91e9-4094fe814522" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="18" CREATED="2008-12-10 09:56:07.0" UUID="2f17e98b-d5bd-4c54-ab5c-e0fea4eaaae0" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="19" CREATED="2008-12-10 09:56:07.0" UUID="9a554521-7f2a-451d-8bea-64827d562db9" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="20" CREATED="2008-12-10 09:56:07.0" UUID="7dcc4480-9a81-47a5-9830-0a70c8f64b79" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="21" CREATED="2008-12-10 09:56:07.0" UUID="a779e64e-6d57-406f-97c2-f09187ef9d87" UPDATED="2008-12-10 09:56:07.238"/>\r
- <HOMOTYPICALGROUP ID="22" CREATED="2008-12-10 09:56:07.0" UUID="9b17ca72-bdef-498a-bc76-cdbb2734c08d" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="23" CREATED="2008-12-10 09:56:07.0" UUID="b4406aa9-c923-40e8-b75e-39b434149a03" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="24" CREATED="2008-12-10 09:56:07.0" UUID="422bf053-72bb-4624-ac65-92de413c7ea2" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="25" CREATED="2008-12-10 09:56:07.0" UUID="1cb31fff-de80-455b-898e-7da8eea8ddb4" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="26" CREATED="2008-12-10 09:56:07.0" UUID="7dcdf873-5dfe-4349-9509-3aee3d3f830f" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="27" CREATED="2008-12-10 09:56:07.0" UUID="ff5ccd39-ac81-4859-a158-7487eddfcd2f" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="28" CREATED="2008-12-10 09:56:07.0" UUID="ca6c8000-a5da-4464-8f4b-8c602a7c58df" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="29" CREATED="2008-12-10 09:56:07.0" UUID="4a617bae-ef0d-4f4f-91d3-8f246dea1479" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="30" CREATED="2008-12-10 09:56:07.0" UUID="17fa02ae-3506-4ed3-b79e-611aa862cacc" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="31" CREATED="2008-12-10 09:56:07.0" UUID="cee3baf6-c479-4606-8d5a-bc8380167175" UPDATED="2008-12-10 09:56:07.238"/>\r
- <HOMOTYPICALGROUP ID="32" CREATED="2008-12-10 09:56:07.0" UUID="036286ea-3379-4f86-b100-11179cf2e793" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="33" CREATED="2008-12-10 09:56:07.0" UUID="a218a8e3-70ae-4c58-9463-7725e1b8e112" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="34" CREATED="2008-12-10 09:56:07.0" UUID="5381dcb7-bddf-49d5-8669-1f34d8a43b32" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="35" CREATED="2008-12-10 09:56:07.0" UUID="baf4e929-4291-4635-aa35-1255069eefe6" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="36" CREATED="2008-12-10 09:56:07.0" UUID="b2b007a4-9c8c-43a1-8da4-20ed85464cf2" UPDATED="2008-12-10 09:56:07.253"/>\r
- <HOMOTYPICALGROUP ID="37" CREATED="2008-12-10 09:56:07.0" UUID="c9bb41cf-d577-46d6-932e-45e5d85f573e" UPDATED="2008-12-10 09:56:07.253"/>\r
- <LANGUAGESTRING ID="1" CREATED="2008-12-10 09:56:07.0" UUID="2a5ceebb-4830-4524-b330-78461bf8cb6b" UPDATED="2008-12-10 09:56:07.253" TEXT="Lorem ipsum dolor sit amet, consectetur adipiscing elit." LANGUAGE_ID="1"/>\r
- <LANGUAGESTRING ID="2" CREATED="2008-12-10 09:56:07.0" UUID="373e7154-9372-4985-b77e-68df28e3f84b" UPDATED="2008-12-10 09:56:07.253" TEXT="Praesent vitae turpis vitae sapien sodales sagittis." LANGUAGE_ID="1"/>\r
- <LANGUAGESTRING ID="3" CREATED="2008-12-10 09:56:07.0" UUID="f72f17d8-58c2-4c4e-b052-89d9016b6d02" UPDATED="2008-12-10 09:56:07.253" TEXT="Maecenas congue ligula ut nulla. Nullam commodo euismod dolor." LANGUAGE_ID="1"/>\r
- <LANGUAGESTRING ID="4" CREATED="2013-06-26 09:56:07.0" UUID="5415450b-f9fd-493d-bdd5-623c5fd34254" UPDATED="2008-12-10 09:56:07.253" TEXT="TextData (A) for a SpecimenDescription" LANGUAGE_ID="1"/>\r
- <ORIGINALSOURCEBASE DTYPE="IdentifiableSource" ID="1" UUID="ebe5a015-6f31-4b62-9fef-d0b4bcfb7e5a" SOURCETYPE="PTS" SOURCEDOBJ_TYPE="eu.etaxonomy.cdm.model.description.TextData" SOURCEDOBJ_ID="34" CITATION_ID="1"/>\r
- <REFERENCE ID="1" CREATED="2008-12-10 09:56:07.0" UUID="596b1325-be50-4b0a-9aa2-3ecd610215f2" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Lorem ipsum" PROTECTEDABBREVTITLECACHE="false" ABBREVTITLECACHE="Sp. Pl." ABBREVTITLE="Sp. Pl." NOMENCLATURALLYRELEVANT="false" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1"/>\r
- <REFERENCE ID="2" CREATED="2008-12-10 09:56:07.0" UUID="ad4322b7-4b05-48af-be70-f113e46c545e" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="cate-sphingidae.org" PROTECTEDABBREVTITLECACHE="false" ABBREVTITLECACHE="Sp. Pl." ABBREVTITLE="Sp. Pl." NOMENCLATURALLYRELEVANT="false" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1"/>\r
- <REFERENCE ID="3" CREATED="2008-12-10 09:56:07.0" UUID="3eea6f96-0682-4025-8cdd-aaaf7c915ae2" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="cate-araceae.org" PROTECTEDABBREVTITLECACHE="false" ABBREVTITLECACHE="Sp. Pl." ABBREVTITLE="Sp. Pl." NOMENCLATURALLYRELEVANT="false" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1"/>\r
- <TAXONBASE DTYPE="Taxon" ID="1" CREATED="2008-12-10 09:56:07.0" UUID="496b1325-be50-4b0a-9aa2-3ecd610215f2" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE=" sec. ???" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="1" NAME_ID="1" SEC_ID="1"/>\r
- <TAXONBASE DTYPE="Taxon" ID="2" CREATED="2008-12-10 09:56:07.0" UUID="822d98dc-9ef7-44b7-a870-94573a3bcb46" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE=" sec. ???" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="0" NAME_ID="2" SEC_ID="1"/>\r
- <TAXONBASE DTYPE="Taxon" ID="3" CREATED="2008-12-10 09:56:07.0" UUID="54e767ee-894e-4540-a758-f906ecb4e2d9" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Sphingidae Linnaeus, 1758 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="204" NAME_ID="3" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="4" CREATED="2008-12-10 09:56:07.0" UUID="ef96fafa-7750-4141-b31b-1ad1daab3e76" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Lathoe Fabricius, 1807 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="6" NAME_ID="4" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="5" CREATED="2008-12-10 09:56:07.0" UUID="17233b5e-74e7-42fc-bc37-522684657ed4" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Smerinthus Latreille, 1802 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="15" NAME_ID="5" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="6" CREATED="2008-12-10 09:56:07.0" UUID="b989a278-c414-49f7-9a10-7d784700e4c4" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Manduca Hübner, 1807 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="18" NAME_ID="6" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="7" CREATED="2008-12-10 09:56:07.0" UUID="15611343-6b11-487f-8233-4756a49a83e2" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Lepchina Oberthür, 1904 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="13" NAME_ID="7" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="8" CREATED="2008-12-10 09:56:07.0" UUID="1489d3dd-71da-4b34-aa5a-d15fccb6bb22" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Smerinthus kindermannii Lederer, 1853 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="4" NAME_ID="8" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="9" CREATED="2008-12-10 09:56:07.0" UUID="900052b7-b69c-4e26-a8f0-01c215214c40" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Mimas Hübner, 1819 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="2" NAME_ID="9" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="10" CREATED="2008-12-10 09:56:07.0" UUID="8e312b40-924f-46b7-8e8d-837f9ad12f51" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Callambulyx Rothschild & Jordan, 1903 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="9" NAME_ID="10" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="11" CREATED="2008-12-10 09:56:07.0" UUID="53fac190-0b4b-44f5-b4e7-b1ca9a25a6e9" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Dolbina Staudinger, 1877 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="7" NAME_ID="11" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="12" CREATED="2008-12-10 09:56:07.0" UUID="7748d6f0-04d8-4052-9904-c43f55682419" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Akbesia Rothschild & Jordan, 1903 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="1" NAME_ID="12" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="13" CREATED="2008-12-10 09:56:07.0" UUID="63f251fa-f283-46bb-ad42-7390f0a1e806" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Sphinx Linnaeus, 1758 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="49" NAME_ID="13" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="14" CREATED="2008-12-10 09:56:07.0" UUID="00245994-149e-4cc4-8186-aefd48d4acf8" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Agrius Hübner, 1819 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="6" NAME_ID="14" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="15" CREATED="2008-12-10 09:56:07.0" UUID="c5cc8674-4242-49a4-aada-72d63194f5fa" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Acherontia Laspeyres, 1809 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="3" NAME_ID="15" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="16" CREATED="2008-12-10 09:56:07.0" UUID="6ecc117a-3e9a-4030-8748-f63a0412e065" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Hemaris Dalman, 1816 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="23" NAME_ID="16" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="17" CREATED="2008-12-10 09:56:07.0" UUID="f6700b5b-b6dc-421a-b979-9429ffad8262" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Proserpinus Hübner, 1819 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="7" NAME_ID="17" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="18" CREATED="2008-12-10 09:56:07.0" UUID="b503efaf-b800-421b-beba-3c6fab4b3c34" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Sphingonaepiopsis Wallengren, 1858 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="7" NAME_ID="18" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="19" CREATED="2008-12-10 09:56:07.0" UUID="a9f42927-e507-4fda-9629-62073a908aae" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Rethera Rothschild & Jordan, 1903 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="4" NAME_ID="19" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="20" CREATED="2008-12-10 09:56:07.0" UUID="557ac748-90df-47a6-b6f4-92d7b1d53abb" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Daphnis Hübner, 1819 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="10" NAME_ID="20" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="21" CREATED="2008-12-10 09:56:07.0" UUID="c089d514-f599-4f5a-bc90-3a11176d0f76" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Clarina Tutt, 1903 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="2" NAME_ID="21" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="22" CREATED="2008-12-10 09:56:07.0" UUID="74ad1d5e-4f73-4e0d-a209-4bf07abd33fa" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Acosmeryx Boisduval, 1875 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="12" NAME_ID="22" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="23" CREATED="2008-12-10 09:56:07.0" UUID="8ecb0dfa-31fd-4f5a-bb83-b897cda813db" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Macroglossum Scopoli, 1777 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="104" NAME_ID="23" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="24" CREATED="2008-12-10 09:56:07.0" UUID="3d2a3441-4602-405f-8ba7-0685d88d7235" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Hyles Hübner, 1819 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="31" NAME_ID="24" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="25" CREATED="2008-12-10 09:56:07.0" UUID="4b47c134-0c99-43c9-a046-620a195cd69e" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Deilephila Laspeyres, 1809 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="4" NAME_ID="25" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="26" CREATED="2008-12-10 09:56:07.0" UUID="7832c932-f687-4180-a808-fa82d57a9ac8" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Hippotion Hübner, 1819 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="40" NAME_ID="26" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Synonym" ID="27" CREATED="2008-12-10 09:56:07.0" UUID="d75b2e3d-7394-4ada-b6a5-93175b8751c1" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Atropos Agassiz, 1846 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="[null]" NAME_ID="27" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Synonym" ID="28" CREATED="2008-12-10 09:56:07.0" UUID="6bfedf25-6dbc-4d5c-9d56-84f9052f3b2a" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Atropos Oken, 1815 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="[null]" NAME_ID="28" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Synonym" ID="29" CREATED="2008-12-10 09:56:07.0" UUID="b3cc5671-5082-4e67-9310-aa88b331f3c7" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Brachyglossa Boisduval, 1828 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="[null]" NAME_ID="29" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Synonym" ID="30" CREATED="2008-12-10 09:56:07.0" UUID="f017e915-0266-4f6d-8db4-eff4e8d6af5c" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Manduca Hübner, 1806 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="[null]" NAME_ID="30" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Synonym" ID="31" CREATED="2008-12-10 09:56:07.0" UUID="3da4ab34-6c50-4586-801e-732615899b07" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Atropos Leach, 1815 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="[null]" NAME_ID="31" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="32" CREATED="2008-12-10 09:56:07.0" UUID="d88aa25c-7984-4870-bc9c-821f094d3a48" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Manduca afflicta (Grote, 1865) sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="1" NAME_ID="32" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="33" CREATED="2008-12-10 09:56:07.0" UUID="2c41e444-b160-4c6a-a1be-d5317d97d68d" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Manduca bergarmatipes (Clark, 1927) sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="0" NAME_ID="33" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="34" CREATED="2008-12-10 09:56:07.0" UUID="7fe66bfd-235b-4164-8f0a-d054b5e962ba" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Manduca chinchilla (Gehlen, 1942) sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="0" NAME_ID="34" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="35" CREATED="2008-12-10 09:56:07.0" UUID="4cab3cc5-eb80-477c-ac1b-be3c3d0a5a85" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Acherontia atropos (Linnaeus, 1758) sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="0" NAME_ID="35" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="36" CREATED="2008-12-10 09:56:07.0" UUID="b04cc9cb-2b4a-4cc4-a94a-3c93a2158b06" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Acherontia lachesis (Fabricius, 1798) sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="1" NAME_ID="36" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="37" CREATED="2008-12-10 09:56:07.0" UUID="7b8b5cb3-37ba-4dba-91ac-4c6ffd6ac331" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Acherontia styx Westwood, 1847 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="1" NAME_ID="37" SEC_ID="2"/>\r
- <TAXONBASE DTYPE="Taxon" ID="38" CREATED="2008-12-10 09:56:07.0" UUID="bc09aca6-06fd-4905-b1e7-cbf7cc65d783" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Cryptocoryne x purpurea nothovar borneoensis N.Jacobsen, Bastm. & Yuji Sasaki sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="0" NAME_ID="38" SEC_ID="3"/>\r
- <TAXONNAMEBASE DTYPE="BotanicalName" ID="1" CREATED="2008-12-10 09:56:07.0" UUID="a49a3963-c4ea-4047-8588-2f8f15352730" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="" BINOMHYBRID="false" GENUSORUNINOMIAL="Aus" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Aus" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="1" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>\r
- <TAXONNAMEBASE DTYPE="BotanicalName" ID="2" CREATED="2008-12-10 09:56:07.0" UUID="05a438d6-065f-49ef-84db-c7dc2c259975" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE=" " FULLTITLECACHE=" " PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="" BINOMHYBRID="false" GENUSORUNINOMIAL="Aus" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Aus aus" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="2" NOMENCLATURALREFERENCE_ID="1" RANK_ID="765"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="3" CREATED="2008-12-10 09:56:07.0" UUID="9640a158-2bdb-4cbc-bff6-8f77e781f86b" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Sphingidae Linnaeus, 1758" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Linnaeus, 1758" BINOMHYBRID="false" GENUSORUNINOMIAL="Sphingidae" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Sphingidae" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="3" NOMENCLATURALREFERENCE_ID="1" RANK_ID="782"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="4" CREATED="2008-12-10 09:56:07.0" UUID="446d8d76-e206-49e1-b6da-d06ce1f296e1" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Lathoe Fabricius, 1807" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Fabricius, 1807" BINOMHYBRID="false" GENUSORUNINOMIAL="Laothoe" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Laothoe" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="4" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="5" CREATED="2008-12-10 09:56:07.0" UUID="3d0b9061-fc9d-4de5-9dc1-341e10eb139e" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Smerinthus Latreille, 1802" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Latreille, 1802" BINOMHYBRID="false" GENUSORUNINOMIAL="Smerinthus" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Smerinthus" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="5" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="6" CREATED="2008-12-10 09:56:07.0" UUID="e4d3c75c-3bfb-451e-ade1-e5e0307879dd" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Manduca Hübner, 1807" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Hübner, 1807" BINOMHYBRID="false" GENUSORUNINOMIAL="Manduca" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Manduca" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="6" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="7" CREATED="2008-12-10 09:56:07.0" UUID="9dcb7f80-05c6-4eb0-bc04-8a72353a67d7" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Lepchina Oberthür, 1904" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Oberthür, 1904" BINOMHYBRID="false" GENUSORUNINOMIAL="Lepchina" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Lepchina" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="7" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="8" CREATED="2008-12-10 09:56:07.0" UUID="f2983a50-5121-4641-a9ab-0507821b7563" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Smerinthus kindermannii Lederer, 1853" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Lederer, 1853" BINOMHYBRID="false" GENUSORUNINOMIAL="Smerinthus" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Smerinthus kindermannii" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="8" NOMENCLATURALREFERENCE_ID="1" RANK_ID="765"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="9" CREATED="2008-12-10 09:56:07.0" UUID="73a90270-16bb-43f6-b7f0-305b617c9971" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Mimas Hübner, 1819" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Hübner, 1819" BINOMHYBRID="false" GENUSORUNINOMIAL="Mimas" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Mimas" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="9" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="10" CREATED="2008-12-10 09:56:07.0" UUID="a67e7431-5c04-4cb6-b83e-c50c439561fe" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Callambulyx Rothschild & Jordan, 1903" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Rothschild & Jordan, 1903" BINOMHYBRID="false" GENUSORUNINOMIAL="Callambulyx" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Callambulyx" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="10" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="11" CREATED="2008-12-10 09:56:07.0" UUID="8e6aaf9b-5b99-4525-873b-f535d35834ac" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Dolbina Staudinger, 1877" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Staudinger, 1877" BINOMHYBRID="false" GENUSORUNINOMIAL="Dolbina" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Dolbina" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="11" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="12" CREATED="2008-12-10 09:56:07.0" UUID="c1864a99-c025-47e1-87f5-5917b905cca1" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Akbesia Rothschild & Jordan, 1903" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Rothschild & Jordan, 1903" BINOMHYBRID="false" GENUSORUNINOMIAL="Akbesia" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Akbesia" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="12" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="13" CREATED="2008-12-10 09:56:07.0" UUID="071a336b-3f31-44d8-bc81-4505dd7ca50b" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Sphinx Linnaeus, 1758" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Linnaeus, 1758" BINOMHYBRID="false" GENUSORUNINOMIAL="Sphinx" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Sphinx" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="13" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="14" CREATED="2008-12-10 09:56:07.0" UUID="e8bf37f5-5b87-43dc-8481-a2e58e4f1e71" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Agrius Hübner, 1819" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Hübner, 1819" BINOMHYBRID="false" GENUSORUNINOMIAL="Agrius" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Agrius" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="14" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="15" CREATED="2008-12-10 09:56:07.0" UUID="c2cab2ad-3e3a-47b8-8aa8-d9e1c0857647" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Acherontia Laspeyres, 1809" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Laspeyres, 1809" BINOMHYBRID="false" GENUSORUNINOMIAL="Acherontia" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Acherontia" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="15" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="16" CREATED="2008-12-10 09:56:07.0" UUID="3d54aed8-7caa-4c74-bbe2-7b946b63f39b" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Hemaris Dalman, 1816" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Dalman, 1816" BINOMHYBRID="false" GENUSORUNINOMIAL="Hemaris" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Hemaris" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="16" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="17" CREATED="2008-12-10 09:56:07.0" UUID="2a91a640-ab5a-4993-a58d-a07c0f2ecba3" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Proserpinus Hübner, 1819" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Hübner, 1819" BINOMHYBRID="false" GENUSORUNINOMIAL="Proserpinus" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Proserpinus" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="17" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="18" CREATED="2008-12-10 09:56:07.0" UUID="3f159abb-55fa-4c62-966d-3ff1ebc7b34b" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Sphingonaepiopsis Wallengren, 1858" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Wallengren, 1858" BINOMHYBRID="false" GENUSORUNINOMIAL="Sphingonaepiopsis" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Sphingonaepiopsis" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="18" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="19" CREATED="2008-12-10 09:56:07.0" UUID="2910ccab-35ea-45bb-ba1a-e8bceed11bd2" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Rethera Rothschild & Jordan, 1903" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Rothschild & Jordan, 1903" BINOMHYBRID="false" GENUSORUNINOMIAL="Rethera" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Rethera" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="19" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="20" CREATED="2008-12-10 09:56:07.0" UUID="e400203b-9b0f-4bc7-8aea-9f060de276de" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Daphnis Hübner, 1819" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Hübner, 1819" BINOMHYBRID="false" GENUSORUNINOMIAL="Daphnis" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Daphnis" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="20" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="21" CREATED="2008-12-10 09:56:07.0" UUID="66354004-1ae2-4aa0-b4d6-d2c6c15a2fb5" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Clarina Tutt, 1903" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Tutt, 1903" BINOMHYBRID="false" GENUSORUNINOMIAL="Clarina" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Clarina" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="21" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="22" CREATED="2008-12-10 09:56:07.0" UUID="f57b8d58-e89d-40ea-9d5b-a2cf96d017eb" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Acosmeryx Boisduval, 1875" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Boisduval, 1875" BINOMHYBRID="false" GENUSORUNINOMIAL="Acosmeryx" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Acosmeryx" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="22" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="23" CREATED="2008-12-10 09:56:07.0" UUID="c3007d9a-3a7c-4cb1-9818-f4f529e760a4" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Macroglossum Scopoli, 1777" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Scopoli, 1777" BINOMHYBRID="false" GENUSORUNINOMIAL="Macroglossum" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Macroglossum" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="23" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="24" CREATED="2008-12-10 09:56:07.0" UUID="aa3dadc8-dc13-4e35-86cd-fd3ca2e796ca" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Hyles Hübner, 1819" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Hübner, 1819" BINOMHYBRID="false" GENUSORUNINOMIAL="Hyles" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Hyles" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="24" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="25" CREATED="2008-12-10 09:56:07.0" UUID="1673213d-60b4-4770-a8e9-509882340d0a" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Deilephila Laspeyres, 1809" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Laspeyres, 1809" BINOMHYBRID="false" GENUSORUNINOMIAL="Deilephila" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Deilephila" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="25" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="26" CREATED="2008-12-10 09:56:07.0" UUID="862897cc-a3e6-436d-899a-96f82d02b4a2" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Hippotion Hübner, 1819" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Hübner, 1819" BINOMHYBRID="false" GENUSORUNINOMIAL="Hippotion" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Hippotion" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="26" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="27" CREATED="2008-12-10 09:56:07.0" UUID="27004fcc-14d4-47d4-a3e1-75750fdb5b79" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Atropos Agassiz, 1846" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Agassiz, 1846" BINOMHYBRID="false" GENUSORUNINOMIAL="Atropos" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Atropos" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="27" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="28" CREATED="2008-12-10 09:56:07.0" UUID="748ccb21-f3a4-4f32-a514-53931965ca2d" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Atropos Oken, 1815" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Oken, 1815" BINOMHYBRID="false" GENUSORUNINOMIAL="Atropos" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Atropos" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="28" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="29" CREATED="2008-12-10 09:56:07.0" UUID="ea1f496a-c4cc-49e4-96d6-f46d58d23297" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Brachyglossa Boisduval, 1828" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Boisduval, 1828" BINOMHYBRID="false" GENUSORUNINOMIAL="Brachyglossa" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Brachyglossa" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="29" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="30" CREATED="2008-12-10 09:56:07.0" UUID="238e41b3-9f4f-44b7-8cf5-28090febe9bb" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Manduca Hübner, 1806" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Hübner, 1806" BINOMHYBRID="false" GENUSORUNINOMIAL="Manduca" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Manduca" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="30" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="31" CREATED="2008-12-10 09:56:07.0" UUID="feda2055-292c-4391-86b7-06bfdab77472" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Atropos Leach, 1815" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Leach, 1815" BINOMHYBRID="false" GENUSORUNINOMIAL="Atropos" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Atropos" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="15" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="32" CREATED="2008-12-10 09:56:07.0" UUID="9faf43ed-2003-4bc2-9dfd-61c71eaa3829" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Manduca afflicta (Grote, 1865)" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="(Grote, 1865)" BINOMHYBRID="false" GENUSORUNINOMIAL="Manduca" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Manduca afflicta" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="31" NOMENCLATURALREFERENCE_ID="1" RANK_ID="765"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="33" CREATED="2008-12-10 09:56:07.0" UUID="666ecfcd-9ee3-41d4-8c47-7cb692cb7f27" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Manduca bergarmatipes (Clark, 1927)" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="(Clark, 1927)" BINOMHYBRID="false" GENUSORUNINOMIAL="Manduca" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Manduca bergarmatipes" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="32" NOMENCLATURALREFERENCE_ID="1" RANK_ID="765"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="34" CREATED="2008-12-10 09:56:07.0" UUID="31b6c4fb-fcd1-4ce7-b26a-2ca15a6c8ac5" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Manduca chinchilla (Gehlen, 1942)" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="(Gehlen, 1942)" BINOMHYBRID="false" GENUSORUNINOMIAL="Manduca" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Manduca chinchilla" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="33" NOMENCLATURALREFERENCE_ID="1" RANK_ID="765"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="35" CREATED="2008-12-10 09:56:07.0" UUID="866278ea-0a6a-4308-acb3-e7e22624e5ea" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Acherontia atropos (Linnaeus, 1758)" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="(Linnaeus, 1758)" BINOMHYBRID="false" GENUSORUNINOMIAL="Acherontia" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Acherontia atropos" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="34" NOMENCLATURALREFERENCE_ID="1" RANK_ID="765"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="36" CREATED="2008-12-10 09:56:07.0" UUID="7969821b-a2cf-4d01-95ec-6a5ed0ca3f69" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Acherontia lachesis (Fabricius, 1798)" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="(Fabricius, 1798)" BINOMHYBRID="false" GENUSORUNINOMIAL="Acherontia" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Acherontia lachesis" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="35" NOMENCLATURALREFERENCE_ID="1" RANK_ID="765"/>\r
- <TAXONNAMEBASE DTYPE="ZoologicalName" ID="37" CREATED="2008-12-10 09:56:07.0" UUID="61b1dcae-8aa6-478a-bcd6-080cf0eb6ad7" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Acherontia styx Westwood, 1847" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Westwood, 1847" BINOMHYBRID="false" GENUSORUNINOMIAL="Acherontia" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Acherontia styx" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="36" NOMENCLATURALREFERENCE_ID="1" RANK_ID="765"/>\r
- <TAXONNAMEBASE DTYPE="BotanicalName" ID="38" CREATED="2008-12-10 09:56:07.0" UUID="c9e7124b-2e60-4df2-996c-b7d024c85d33" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Cryptocoryne x purpurea nothovar borneoensis N.Jacobsen, Bastm. & Yuji Sasaki" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="N.Jacobsen, Bastm. & Yuji Sasaki" BINOMHYBRID="true" GENUSORUNINOMIAL="Cryptocoryne" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Cryptocoryne x purpurea nothovar borneoensis" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="37" NOMENCLATURALREFERENCE_ID="1" RANK_ID="761"/>\r
-\r
-\r
- <HIBERNATE_SEQUENCES SEQUENCE_NAME="DescriptionBase" NEXT_VAL="35"/>\r
- <HIBERNATE_SEQUENCES SEQUENCE_NAME="TaxonBase" NEXT_VAL="39"/>\r
-\r
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="1" CREATED="2008-12-10 09:56:07.0" UUID="5f3265ed-68ad-4ec3-826f-0d29d25986b9" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE=" sec. ???" IMAGEGALLERY="false" TAXON_ID="1"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="2" CREATED="2008-12-10 09:56:07.0" UUID="d69d6d40-eb98-42f9-8d30-fbeb0a7db33b" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE=" sec. ???" IMAGEGALLERY="false" TAXON_ID="2"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="3" CREATED="2008-12-10 09:56:07.0" UUID="c6782124-7cf0-4454-880f-c4f16dc03105" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Sphingidae Linnaeus, 1758 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="3"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="4" CREATED="2008-12-10 09:56:07.0" UUID="68ff9cb2-3fc0-4580-8295-bbb0de0d42bf" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Lathoe Fabricius, 1807 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="4"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="5" CREATED="2008-12-10 09:56:07.0" UUID="1240e8ba-32a2-4914-ad00-2f87e18ab65e" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Smerinthus Latreille, 1802 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="5"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="6" CREATED="2008-12-10 09:56:07.0" UUID="5c4bef76-72a6-4e39-a6d8-1d8707519d7a" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Manduca Hübner, 1807 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="6"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="7" CREATED="2008-12-10 09:56:07.0" UUID="40dadf87-4a1c-4852-afb4-6787a4e75854" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Lepchina Oberthür, 1904 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="7"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="8" CREATED="2008-12-10 09:56:07.0" UUID="1f02a155-3669-464b-99a9-4c36307549ce" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Smerinthus kindermannii Lederer, 1853 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="8"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="9" CREATED="2008-12-10 09:56:07.0" UUID="b58fb240-01ac-4a68-99f7-1ccb5576d663" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Mimas Hübner, 1819 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="9"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="10" CREATED="2008-12-10 09:56:07.0" UUID="5b99a457-e776-41a8-b18b-58c5fb0b7b60" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Callambulyx Rothschild & Jordan, 1903 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="10"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="11" CREATED="2008-12-10 09:56:07.0" UUID="3316a5a7-56f5-4e12-b6ed-c1758e0d2065" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Dolbina Staudinger, 1877 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="11"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="12" CREATED="2008-12-10 09:56:07.0" UUID="0e9bf65a-1322-408c-bf9e-01ea6834f191" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Akbesia Rothschild & Jordan, 1903 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="12"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="13" CREATED="2008-12-10 09:56:07.0" UUID="1f17b094-77a6-4f0b-b687-f6cd0b3f5cb8" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Sphinx Linnaeus, 1758 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="13"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="14" CREATED="2008-12-10 09:56:07.0" UUID="eb591ebe-bec2-4639-b449-c3fd7f5e129c" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Agrius Hübner, 1819 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="14"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="15" CREATED="2008-12-10 09:56:07.0" UUID="f2a093ed-7d09-45ea-abca-366a299b19d3" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Acherontia Laspeyres, 1809 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="15"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="16" CREATED="2008-12-10 09:56:07.0" UUID="f6367b5a-6466-4cf2-8eb0-23a558514914" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Hemaris Dalman, 1816 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="16"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="17" CREATED="2008-12-10 09:56:07.0" UUID="fd9fe4e0-5137-483c-a8ee-c27d2f45d308" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Proserpinus Hübner, 1819 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="17"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="18" CREATED="2008-12-10 09:56:07.0" UUID="5bb4bd8f-9d96-4535-8a7b-3895bc229fd0" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Sphingonaepiopsis Wallengren, 1858 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="18"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="19" CREATED="2008-12-10 09:56:07.0" UUID="61bad9ae-900d-40e2-90f1-cd516f68a7aa" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Rethera Rothschild & Jordan, 1903 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="19"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="20" CREATED="2008-12-10 09:56:07.0" UUID="8ed18615-f9be-4c3c-871a-7d2371b49869" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Daphnis Hübner, 1819 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="20"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="21" CREATED="2008-12-10 09:56:07.0" UUID="4e2b5eca-db5f-41ac-a4e3-e936cae658da" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Clarina Tutt, 1903 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="21"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="22" CREATED="2008-12-10 09:56:07.0" UUID="a96cdfdb-7f9d-44b7-8c94-d68bc18850d5" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Acosmeryx Boisduval, 1875 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="22"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="23" CREATED="2008-12-10 09:56:07.0" UUID="7c79ee94-fc7f-4437-b5aa-b27a4dd482ac" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Macroglossum Scopoli, 1777 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="23"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="24" CREATED="2008-12-10 09:56:07.0" UUID="03a54c6d-efb8-48c3-8672-e01880bcd5d7" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Hyles Hübner, 1819 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="24"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="25" CREATED="2008-12-10 09:56:07.0" UUID="e7243cb3-625b-4104-bb46-b92d48351d76" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Deilephila Laspeyres, 1809 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="25"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="26" CREATED="2008-12-10 09:56:07.0" UUID="0d7bf5f6-9fef-482e-b6f8-7e9a9c42c744" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Hippotion Hübner, 1819 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="26"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="27" CREATED="2008-12-10 09:56:07.0" UUID="cfa7ca5e-3ed1-4eda-93ab-b3c5e5fc3661" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Manduca afflicta (Grote, 1865) sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="32"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="28" CREATED="2008-12-10 09:56:07.0" UUID="7129d981-7c92-4bef-9afe-a0d6e21a2e4e" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Manduca bergarmatipes (Clark, 1927) sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="33"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="29" CREATED="2008-12-10 09:56:07.0" UUID="ef66e9ec-b9a4-435e-98ed-e3e72a86e9a4" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Manduca chinchilla (Gehlen, 1942) sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="34"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="30" CREATED="2008-12-10 09:56:07.0" UUID="6ef3bcac-7192-4ee0-8c1b-57e0b3d49c5c" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Acherontia atropos (Linnaeus, 1758) sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="35"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="31" CREATED="2008-12-10 09:56:07.0" UUID="fd6cdb64-142c-4df1-b366-c5e76f08a1fc" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Acherontia lachesis (Fabricius, 1798) sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="36"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="32" CREATED="2008-12-10 09:56:07.0" UUID="6d647bbe-5d91-46be-87f5-1781d9d5842c" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Acherontia styx Westwood, 1847 sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="37"/>
+ <DESCRIPTIONBASE DTYPE="TaxonDescription" ID="33" CREATED="2008-12-10 09:56:07.0" UUID="620de7a8-8c83-42c9-add7-fdc55ebf943a" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Cryptocoryne x purpurea nothovar borneoensis N.Jacobsen, Bastm. & Yuji Sasaki sec. cate-sphingidae.org" IMAGEGALLERY="false" TAXON_ID="38"/>
+ <DESCRIPTIONBASE DTYPE="SpecimenDescription" ID="34" CREATED="2013-06-26 18:26:26.0" UUID="7c45d218-b2c9-4298-9abd-b86e60f4fef8" UPDATED="2013-06-26 18:26:26.0" PROTECTEDTITLECACHE="true" TITLECACHE="specimendescription 1" IMAGEGALLERY="false" TAXON_ID="[null]"/>
+
+ <DESCRIPTIONBASE_MARKER DESCRIPTIONBASE_ID="31" MARKERS_ID="1"/>
+ <DESCRIPTIONBASE_MARKER DESCRIPTIONBASE_ID="31" MARKERS_ID="2"/>
+ <MARKER ID="1" UUID="a0b943f6-3737-4ba4-9d5c-72f3f1476996" FLAG="TRUE" MARKEDOBJ_TYPE="eu.etaxonomy.cdm.model.description.TaxonDescription" MARKEDOBJ_ID="31" MARKERTYPE_ID="890"/>
+ <MARKER ID="2" UUID="e873c908-ec5b-4edf-8e80-11da5a9d26b3" FLAG="FALSE" MARKEDOBJ_TYPE="eu.etaxonomy.cdm.model.description.TaxonDescription" MARKEDOBJ_ID="31" MARKERTYPE_ID="892"/>
+
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="1" INDESCRIPTION_ID="1" CREATED="2008-12-10 09:56:07.0" UUID="40458e70-a065-450f-b27d-adf61cc28a7f" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1969" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="2" INDESCRIPTION_ID="2" CREATED="2008-12-10 09:56:07.0" UUID="d4099b8d-0644-4025-8a56-e7fc2d95004e" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1969" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="3" INDESCRIPTION_ID="3" CREATED="2008-12-10 09:56:07.0" UUID="317fafca-3722-4d8d-8c4f-701d4f5b911d" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1969" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="4" INDESCRIPTION_ID="4" CREATED="2008-12-10 09:56:07.0" UUID="452b7bbc-cdb3-4315-9a3d-5be293fb85ee" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1969" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="5" INDESCRIPTION_ID="5" CREATED="2008-12-10 09:56:07.0" UUID="db46d7eb-e8df-4a42-bcc1-d6b4f6baf2f9" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1969" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="6" INDESCRIPTION_ID="6" CREATED="2008-12-10 09:56:07.0" UUID="51f4771f-b01d-4e3f-a5de-87c6f7b0e2f2" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1969" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="7" INDESCRIPTION_ID="7" CREATED="2008-12-10 09:56:07.0" UUID="a453ae39-fbb9-494d-81e1-dc9e1a305e01" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1969" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="8" INDESCRIPTION_ID="8" CREATED="2008-12-10 09:56:07.0" UUID="11f15758-fa8d-4dae-91b9-e5b475481890" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1969" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="9" INDESCRIPTION_ID="9" CREATED="2008-12-10 09:56:07.0" UUID="bbe158e3-89d6-443f-a4e5-7ef9e790b6ff" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1969" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="10" INDESCRIPTION_ID="10" CREATED="2008-12-10 09:56:07.0" UUID="160612a7-85b6-4b3f-a892-f8d6066de37a" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1970" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="11" INDESCRIPTION_ID="11" CREATED="2008-12-10 09:56:07.0" UUID="06ecc14e-6c65-44ed-a3f4-91c27298b50c" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1970" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="12" INDESCRIPTION_ID="12" CREATED="2008-12-10 09:56:07.0" UUID="f6ba0d83-bdca-470d-a793-435981ba08cf" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1970" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="13" INDESCRIPTION_ID="13" CREATED="2008-12-10 09:56:07.0" UUID="4f02b16e-7711-4e74-af56-e3f4d755dc18" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1970" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="14" INDESCRIPTION_ID="14" CREATED="2008-12-10 09:56:07.0" UUID="173fa31b-47d4-461f-8198-84ffbf67df14" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1970" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="15" INDESCRIPTION_ID="15" CREATED="2008-12-10 09:56:07.0" UUID="aa079c74-ea22-4d8d-98e3-95da8ba79fa6" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1970" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="16" INDESCRIPTION_ID="16" CREATED="2008-12-10 09:56:07.0" UUID="bb1422a1-8b54-4b51-9dcf-3644c68d2111" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1970" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="17" INDESCRIPTION_ID="17" CREATED="2008-12-10 09:56:07.0" UUID="2a65248c-93cf-4113-b149-b890629ccefb" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1970" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="18" INDESCRIPTION_ID="18" CREATED="2008-12-10 09:56:07.0" UUID="4d8eb1b4-0cb4-4880-9f45-471f5f22f34a" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1970" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="19" INDESCRIPTION_ID="19" CREATED="2008-12-10 09:56:07.0" UUID="c47cd92c-0fac-4ed2-83f1-27f83fdb4657" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1970" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="20" INDESCRIPTION_ID="20" CREATED="2008-12-10 09:56:07.0" UUID="eb89b640-16f3-43fa-add1-15d0198b3274" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1970" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="21" INDESCRIPTION_ID="2" CREATED="2008-12-10 09:56:07.0" UUID="83687081-25c8-4493-9472-a48f973fd8f6" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1968" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="22" INDESCRIPTION_ID="4" CREATED="2008-12-10 09:56:07.0" UUID="35174e33-0a19-4d25-8a92-af46e9720120" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1968" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="23" INDESCRIPTION_ID="6" CREATED="2008-12-10 09:56:07.0" UUID="ee5163e6-ce9f-4465-82cd-36f33b857af9" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1968" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="24" INDESCRIPTION_ID="8" CREATED="2008-12-10 09:56:07.0" UUID="54f31cbb-4447-456e-86cd-490848589173" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1968" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="25" INDESCRIPTION_ID="10" CREATED="2008-12-10 09:56:07.0" UUID="b7ed185d-a5df-4d2d-939f-c3fcb0bead69" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1968" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="26" INDESCRIPTION_ID="12" CREATED="2008-12-10 09:56:07.0" UUID="4e077042-d34d-4975-a042-01abdb0631bd" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1968" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="27" INDESCRIPTION_ID="14" CREATED="2008-12-10 09:56:07.0" UUID="a6e78e58-2a8a-4071-8a33-e9c182f72efc" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1968" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="28" INDESCRIPTION_ID="16" CREATED="2008-12-10 09:56:07.0" UUID="8854b227-5bc2-4076-a35c-d14468e5961d" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1968" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="29" INDESCRIPTION_ID="18" CREATED="2008-12-10 09:56:07.0" UUID="c44a489d-ecd2-4c61-9e63-22eb4e9de14e" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1968" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="30" INDESCRIPTION_ID="20" CREATED="2008-12-10 09:56:07.0" UUID="9cfbd03a-4cd7-42fb-86ed-d469b5126caf" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1968" STATUS_ID="1994"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="31" INDESCRIPTION_ID="31" CREATED="2008-12-10 09:56:07.0" UUID="cc31333c-90b6-4927-9597-1820755ffab3" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1970" STATUS_ID="1995"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="32" INDESCRIPTION_ID="32" CREATED="2008-12-10 09:56:07.0" UUID="5e49861f-94bc-45c1-a9b4-be9513b91dd9" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1970" STATUS_ID="1995"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="Distribution" ID="33" INDESCRIPTION_ID="33" CREATED="2008-12-10 09:56:07.0" UUID="cf4b976f-de07-4e71-ab70-4585062c6ed5" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="[null]" AREA_ID="1970" STATUS_ID="1995"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="TextData" ID="34" INDESCRIPTION_ID="1" CREATED="2008-12-10 09:56:07.0" UUID="31a0160a-51b2-4565-85cf-2be58cb561d6" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="922" AREA_ID="[null]" STATUS_ID="[null]"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="TextData" ID="35" INDESCRIPTION_ID="1" CREATED="2008-12-10 09:56:07.0" UUID="50f6b799-3585-40a7-b69d-e7be77b2651a" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="936" AREA_ID="[null]" STATUS_ID="[null]"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="TextData" ID="36" INDESCRIPTION_ID="2" CREATED="2008-12-10 09:56:07.0" UUID="c1e21ed1-4925-4e85-845f-e4b7a8386a33" UPDATED="2008-12-10 09:56:07.253" FEATURE_ID="936" AREA_ID="[null]" STATUS_ID="[null]"/>
+ <DESCRIPTIONELEMENTBASE DTYPE="TextData" ID="37" INDESCRIPTION_ID="34" CREATED="2013-06-26 09:56:07.0" UUID="ab7d872a-0793-40b1-ae65-1c574dc09fc4" UPDATED="2013-06-26 09:56:07.253" FEATURE_ID="922" AREA_ID="[null]" STATUS_ID="[null]"/>
+ <DESCRIPTIONELEMENTBASE_LANGUAGESTRING DESCRIPTIONELEMENTBASE_ID="34" MULTILANGUAGETEXT_ID="1" MULTILANGUAGETEXT_MAPKEY_ID="406"/>
+ <DESCRIPTIONELEMENTBASE_LANGUAGESTRING DESCRIPTIONELEMENTBASE_ID="35" MULTILANGUAGETEXT_ID="2" MULTILANGUAGETEXT_MAPKEY_ID="406"/>
+ <DESCRIPTIONELEMENTBASE_LANGUAGESTRING DESCRIPTIONELEMENTBASE_ID="36" MULTILANGUAGETEXT_ID="3" MULTILANGUAGETEXT_MAPKEY_ID="406"/>
+ <DESCRIPTIONELEMENTBASE_LANGUAGESTRING DESCRIPTIONELEMENTBASE_ID="37" MULTILANGUAGETEXT_ID="4" MULTILANGUAGETEXT_MAPKEY_ID="406"/>
+ <DESCRIPTIONELEMENTBASE_ORIGINALSOURCEBASE DESCRIPTIONELEMENTBASE_ID="34" SOURCES_ID="1"/>
+ <HOMOTYPICALGROUP ID="1" CREATED="2008-12-10 09:56:07.0" UUID="7b214eb9-a6ac-48e5-af02-bbea634d2a03" UPDATED="2008-12-10 09:56:07.238"/>
+ <HOMOTYPICALGROUP ID="2" CREATED="2008-12-10 09:56:07.0" UUID="6c241a4c-e5a0-4344-8e5e-a81f17b75973" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="3" CREATED="2008-12-10 09:56:07.0" UUID="76eac2b8-9c5a-4b25-acd1-e4e0d894106f" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="4" CREATED="2008-12-10 09:56:07.0" UUID="3c6ff240-9cab-4ec9-b47e-97280318ab30" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="5" CREATED="2008-12-10 09:56:07.0" UUID="5cd73df5-1c72-44a6-9864-adb145d8bd56" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="6" CREATED="2008-12-10 09:56:07.0" UUID="335977f0-ef55-4294-b78b-aed47435b428" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="7" CREATED="2008-12-10 09:56:07.0" UUID="e902a44e-7b26-4dc5-8251-d62c48d01bad" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="8" CREATED="2008-12-10 09:56:07.0" UUID="4806e853-d7c3-4216-8fa1-022be728bd3c" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="9" CREATED="2008-12-10 09:56:07.0" UUID="c454bd99-c7f3-43d6-8846-5fcc24f0c31b" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="10" CREATED="2008-12-10 09:56:07.0" UUID="4a92945c-e198-4f59-a19c-717e0b83e9f0" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="11" CREATED="2008-12-10 09:56:07.0" UUID="c5e7f225-60c2-4001-9488-0f584ba522ea" UPDATED="2008-12-10 09:56:07.238"/>
+ <HOMOTYPICALGROUP ID="12" CREATED="2008-12-10 09:56:07.0" UUID="a4f0abc0-f6c3-42f0-98e9-10aec07415f2" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="13" CREATED="2008-12-10 09:56:07.0" UUID="9eb6d8c4-4b74-481f-9063-6251843606a3" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="14" CREATED="2008-12-10 09:56:07.0" UUID="2addf98d-ab27-4b26-ae1b-06fda059cddd" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="15" CREATED="2008-12-10 09:56:07.0" UUID="399af850-b662-4c3d-9038-ea5219af49de" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="16" CREATED="2008-12-10 09:56:07.0" UUID="ffb3e841-ea6a-4107-8dc7-7ee52f7ae500" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="17" CREATED="2008-12-10 09:56:07.0" UUID="de608141-143c-4337-91e9-4094fe814522" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="18" CREATED="2008-12-10 09:56:07.0" UUID="2f17e98b-d5bd-4c54-ab5c-e0fea4eaaae0" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="19" CREATED="2008-12-10 09:56:07.0" UUID="9a554521-7f2a-451d-8bea-64827d562db9" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="20" CREATED="2008-12-10 09:56:07.0" UUID="7dcc4480-9a81-47a5-9830-0a70c8f64b79" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="21" CREATED="2008-12-10 09:56:07.0" UUID="a779e64e-6d57-406f-97c2-f09187ef9d87" UPDATED="2008-12-10 09:56:07.238"/>
+ <HOMOTYPICALGROUP ID="22" CREATED="2008-12-10 09:56:07.0" UUID="9b17ca72-bdef-498a-bc76-cdbb2734c08d" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="23" CREATED="2008-12-10 09:56:07.0" UUID="b4406aa9-c923-40e8-b75e-39b434149a03" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="24" CREATED="2008-12-10 09:56:07.0" UUID="422bf053-72bb-4624-ac65-92de413c7ea2" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="25" CREATED="2008-12-10 09:56:07.0" UUID="1cb31fff-de80-455b-898e-7da8eea8ddb4" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="26" CREATED="2008-12-10 09:56:07.0" UUID="7dcdf873-5dfe-4349-9509-3aee3d3f830f" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="27" CREATED="2008-12-10 09:56:07.0" UUID="ff5ccd39-ac81-4859-a158-7487eddfcd2f" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="28" CREATED="2008-12-10 09:56:07.0" UUID="ca6c8000-a5da-4464-8f4b-8c602a7c58df" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="29" CREATED="2008-12-10 09:56:07.0" UUID="4a617bae-ef0d-4f4f-91d3-8f246dea1479" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="30" CREATED="2008-12-10 09:56:07.0" UUID="17fa02ae-3506-4ed3-b79e-611aa862cacc" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="31" CREATED="2008-12-10 09:56:07.0" UUID="cee3baf6-c479-4606-8d5a-bc8380167175" UPDATED="2008-12-10 09:56:07.238"/>
+ <HOMOTYPICALGROUP ID="32" CREATED="2008-12-10 09:56:07.0" UUID="036286ea-3379-4f86-b100-11179cf2e793" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="33" CREATED="2008-12-10 09:56:07.0" UUID="a218a8e3-70ae-4c58-9463-7725e1b8e112" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="34" CREATED="2008-12-10 09:56:07.0" UUID="5381dcb7-bddf-49d5-8669-1f34d8a43b32" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="35" CREATED="2008-12-10 09:56:07.0" UUID="baf4e929-4291-4635-aa35-1255069eefe6" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="36" CREATED="2008-12-10 09:56:07.0" UUID="b2b007a4-9c8c-43a1-8da4-20ed85464cf2" UPDATED="2008-12-10 09:56:07.253"/>
+ <HOMOTYPICALGROUP ID="37" CREATED="2008-12-10 09:56:07.0" UUID="c9bb41cf-d577-46d6-932e-45e5d85f573e" UPDATED="2008-12-10 09:56:07.253"/>
+ <LANGUAGESTRING ID="1" CREATED="2008-12-10 09:56:07.0" UUID="2a5ceebb-4830-4524-b330-78461bf8cb6b" UPDATED="2008-12-10 09:56:07.253" TEXT="Lorem ipsum dolor sit amet, consectetur adipiscing elit." LANGUAGE_ID="1"/>
+ <LANGUAGESTRING ID="2" CREATED="2008-12-10 09:56:07.0" UUID="373e7154-9372-4985-b77e-68df28e3f84b" UPDATED="2008-12-10 09:56:07.253" TEXT="Praesent vitae turpis vitae sapien sodales sagittis." LANGUAGE_ID="1"/>
+ <LANGUAGESTRING ID="3" CREATED="2008-12-10 09:56:07.0" UUID="f72f17d8-58c2-4c4e-b052-89d9016b6d02" UPDATED="2008-12-10 09:56:07.253" TEXT="Maecenas congue ligula ut nulla. Nullam commodo euismod dolor." LANGUAGE_ID="1"/>
+ <LANGUAGESTRING ID="4" CREATED="2013-06-26 09:56:07.0" UUID="5415450b-f9fd-493d-bdd5-623c5fd34254" UPDATED="2008-12-10 09:56:07.253" TEXT="TextData (A) for a SpecimenDescription" LANGUAGE_ID="1"/>
+ <ORIGINALSOURCEBASE DTYPE="IdentifiableSource" ID="1" UUID="ebe5a015-6f31-4b62-9fef-d0b4bcfb7e5a" SOURCETYPE="PTS" SOURCEDOBJ_TYPE="eu.etaxonomy.cdm.model.description.TextData" SOURCEDOBJ_ID="34" CITATION_ID="1"/>
+ <REFERENCE ID="1" CREATED="2008-12-10 09:56:07.0" UUID="596b1325-be50-4b0a-9aa2-3ecd610215f2" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Lorem ipsum" PROTECTEDABBREVTITLECACHE="false" ABBREVTITLECACHE="Sp. Pl." ABBREVTITLE="Sp. Pl." NOMENCLATURALLYRELEVANT="false" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1"/>
+ <REFERENCE ID="2" CREATED="2008-12-10 09:56:07.0" UUID="ad4322b7-4b05-48af-be70-f113e46c545e" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="cate-sphingidae.org" PROTECTEDABBREVTITLECACHE="false" ABBREVTITLECACHE="Sp. Pl." ABBREVTITLE="Sp. Pl." NOMENCLATURALLYRELEVANT="false" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1"/>
+ <REFERENCE ID="3" CREATED="2008-12-10 09:56:07.0" UUID="3eea6f96-0682-4025-8cdd-aaaf7c915ae2" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="cate-araceae.org" PROTECTEDABBREVTITLECACHE="false" ABBREVTITLECACHE="Sp. Pl." ABBREVTITLE="Sp. Pl." NOMENCLATURALLYRELEVANT="false" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1"/>
+ <TAXONBASE DTYPE="Taxon" ID="1" CREATED="2008-12-10 09:56:07.0" UUID="496b1325-be50-4b0a-9aa2-3ecd610215f2" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE=" sec. ???" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="1" NAME_ID="1" SEC_ID="1"/>
+ <TAXONBASE DTYPE="Taxon" ID="2" CREATED="2008-12-10 09:56:07.0" UUID="822d98dc-9ef7-44b7-a870-94573a3bcb46" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE=" sec. ???" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="0" NAME_ID="2" SEC_ID="1"/>
+ <TAXONBASE DTYPE="Taxon" ID="3" CREATED="2008-12-10 09:56:07.0" UUID="54e767ee-894e-4540-a758-f906ecb4e2d9" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Sphingidae Linnaeus, 1758 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="204" NAME_ID="3" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="4" CREATED="2008-12-10 09:56:07.0" UUID="ef96fafa-7750-4141-b31b-1ad1daab3e76" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Lathoe Fabricius, 1807 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="6" NAME_ID="4" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="5" CREATED="2008-12-10 09:56:07.0" UUID="17233b5e-74e7-42fc-bc37-522684657ed4" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Smerinthus Latreille, 1802 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="15" NAME_ID="5" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="6" CREATED="2008-12-10 09:56:07.0" UUID="b989a278-c414-49f7-9a10-7d784700e4c4" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Manduca Hübner, 1807 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="18" NAME_ID="6" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="7" CREATED="2008-12-10 09:56:07.0" UUID="15611343-6b11-487f-8233-4756a49a83e2" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Lepchina Oberthür, 1904 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="13" NAME_ID="7" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="8" CREATED="2008-12-10 09:56:07.0" UUID="1489d3dd-71da-4b34-aa5a-d15fccb6bb22" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Smerinthus kindermannii Lederer, 1853 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="4" NAME_ID="8" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="9" CREATED="2008-12-10 09:56:07.0" UUID="900052b7-b69c-4e26-a8f0-01c215214c40" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Mimas Hübner, 1819 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="2" NAME_ID="9" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="10" CREATED="2008-12-10 09:56:07.0" UUID="8e312b40-924f-46b7-8e8d-837f9ad12f51" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Callambulyx Rothschild & Jordan, 1903 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="9" NAME_ID="10" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="11" CREATED="2008-12-10 09:56:07.0" UUID="53fac190-0b4b-44f5-b4e7-b1ca9a25a6e9" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Dolbina Staudinger, 1877 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="7" NAME_ID="11" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="12" CREATED="2008-12-10 09:56:07.0" UUID="7748d6f0-04d8-4052-9904-c43f55682419" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Akbesia Rothschild & Jordan, 1903 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="1" NAME_ID="12" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="13" CREATED="2008-12-10 09:56:07.0" UUID="63f251fa-f283-46bb-ad42-7390f0a1e806" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Sphinx Linnaeus, 1758 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="49" NAME_ID="13" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="14" CREATED="2008-12-10 09:56:07.0" UUID="00245994-149e-4cc4-8186-aefd48d4acf8" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Agrius Hübner, 1819 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="6" NAME_ID="14" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="15" CREATED="2008-12-10 09:56:07.0" UUID="c5cc8674-4242-49a4-aada-72d63194f5fa" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Acherontia Laspeyres, 1809 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="3" NAME_ID="15" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="16" CREATED="2008-12-10 09:56:07.0" UUID="6ecc117a-3e9a-4030-8748-f63a0412e065" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Hemaris Dalman, 1816 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="23" NAME_ID="16" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="17" CREATED="2008-12-10 09:56:07.0" UUID="f6700b5b-b6dc-421a-b979-9429ffad8262" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Proserpinus Hübner, 1819 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="7" NAME_ID="17" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="18" CREATED="2008-12-10 09:56:07.0" UUID="b503efaf-b800-421b-beba-3c6fab4b3c34" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Sphingonaepiopsis Wallengren, 1858 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="7" NAME_ID="18" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="19" CREATED="2008-12-10 09:56:07.0" UUID="a9f42927-e507-4fda-9629-62073a908aae" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Rethera Rothschild & Jordan, 1903 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="4" NAME_ID="19" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="20" CREATED="2008-12-10 09:56:07.0" UUID="557ac748-90df-47a6-b6f4-92d7b1d53abb" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Daphnis Hübner, 1819 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="10" NAME_ID="20" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="21" CREATED="2008-12-10 09:56:07.0" UUID="c089d514-f599-4f5a-bc90-3a11176d0f76" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Clarina Tutt, 1903 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="2" NAME_ID="21" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="22" CREATED="2008-12-10 09:56:07.0" UUID="74ad1d5e-4f73-4e0d-a209-4bf07abd33fa" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Acosmeryx Boisduval, 1875 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="12" NAME_ID="22" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="23" CREATED="2008-12-10 09:56:07.0" UUID="8ecb0dfa-31fd-4f5a-bb83-b897cda813db" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Macroglossum Scopoli, 1777 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="104" NAME_ID="23" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="24" CREATED="2008-12-10 09:56:07.0" UUID="3d2a3441-4602-405f-8ba7-0685d88d7235" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Hyles Hübner, 1819 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="31" NAME_ID="24" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="25" CREATED="2008-12-10 09:56:07.0" UUID="4b47c134-0c99-43c9-a046-620a195cd69e" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Deilephila Laspeyres, 1809 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="4" NAME_ID="25" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="26" CREATED="2008-12-10 09:56:07.0" UUID="7832c932-f687-4180-a808-fa82d57a9ac8" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Hippotion Hübner, 1819 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="40" NAME_ID="26" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Synonym" ID="27" CREATED="2008-12-10 09:56:07.0" UUID="d75b2e3d-7394-4ada-b6a5-93175b8751c1" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Atropos Agassiz, 1846 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="[null]" NAME_ID="27" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Synonym" ID="28" CREATED="2008-12-10 09:56:07.0" UUID="6bfedf25-6dbc-4d5c-9d56-84f9052f3b2a" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Atropos Oken, 1815 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="[null]" NAME_ID="28" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Synonym" ID="29" CREATED="2008-12-10 09:56:07.0" UUID="b3cc5671-5082-4e67-9310-aa88b331f3c7" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Brachyglossa Boisduval, 1828 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="[null]" NAME_ID="29" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Synonym" ID="30" CREATED="2008-12-10 09:56:07.0" UUID="f017e915-0266-4f6d-8db4-eff4e8d6af5c" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Manduca Hübner, 1806 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="[null]" NAME_ID="30" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Synonym" ID="31" CREATED="2008-12-10 09:56:07.0" UUID="3da4ab34-6c50-4586-801e-732615899b07" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Atropos Leach, 1815 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="[null]" NAME_ID="31" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="32" CREATED="2008-12-10 09:56:07.0" UUID="d88aa25c-7984-4870-bc9c-821f094d3a48" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Manduca afflicta (Grote, 1865) sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="1" NAME_ID="32" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="33" CREATED="2008-12-10 09:56:07.0" UUID="2c41e444-b160-4c6a-a1be-d5317d97d68d" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Manduca bergarmatipes (Clark, 1927) sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="0" NAME_ID="33" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="34" CREATED="2008-12-10 09:56:07.0" UUID="7fe66bfd-235b-4164-8f0a-d054b5e962ba" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Manduca chinchilla (Gehlen, 1942) sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="0" NAME_ID="34" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="35" CREATED="2008-12-10 09:56:07.0" UUID="4cab3cc5-eb80-477c-ac1b-be3c3d0a5a85" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Acherontia atropos (Linnaeus, 1758) sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="0" NAME_ID="35" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="36" CREATED="2008-12-10 09:56:07.0" UUID="b04cc9cb-2b4a-4cc4-a94a-3c93a2158b06" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Acherontia lachesis (Fabricius, 1798) sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="1" NAME_ID="36" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="37" CREATED="2008-12-10 09:56:07.0" UUID="7b8b5cb3-37ba-4dba-91ac-4c6ffd6ac331" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Acherontia styx Westwood, 1847 sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="1" NAME_ID="37" SEC_ID="2"/>
+ <TAXONBASE DTYPE="Taxon" ID="38" CREATED="2008-12-10 09:56:07.0" UUID="bc09aca6-06fd-4905-b1e7-cbf7cc65d783" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE="Cryptocoryne x purpurea nothovar borneoensis N.Jacobsen, Bastm. & Yuji Sasaki sec. cate-sphingidae.org" PUBLISH="true" DOUBTFUL="false" USENAMECACHE="false" TAXONSTATUSUNKNOWN="false" UNPLACED="false" EXCLUDED="false" TAXONOMICCHILDRENCOUNT="0" NAME_ID="38" SEC_ID="3"/>
+ <TAXONNAMEBASE DTYPE="BotanicalName" ID="1" CREATED="2008-12-10 09:56:07.0" UUID="a49a3963-c4ea-4047-8588-2f8f15352730" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="" BINOMHYBRID="false" GENUSORUNINOMIAL="Aus" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Aus" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="1" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>
+ <TAXONNAMEBASE DTYPE="BotanicalName" ID="2" CREATED="2008-12-10 09:56:07.0" UUID="05a438d6-065f-49ef-84db-c7dc2c259975" UPDATED="2008-12-10 09:56:07.253" PROTECTEDTITLECACHE="true" TITLECACHE=" " FULLTITLECACHE=" " PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="" BINOMHYBRID="false" GENUSORUNINOMIAL="Aus" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Aus aus" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="2" NOMENCLATURALREFERENCE_ID="1" RANK_ID="765"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="3" CREATED="2008-12-10 09:56:07.0" UUID="9640a158-2bdb-4cbc-bff6-8f77e781f86b" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Sphingidae Linnaeus, 1758" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Linnaeus, 1758" BINOMHYBRID="false" GENUSORUNINOMIAL="Sphingidae" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Sphingidae" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="3" NOMENCLATURALREFERENCE_ID="1" RANK_ID="782"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="4" CREATED="2008-12-10 09:56:07.0" UUID="446d8d76-e206-49e1-b6da-d06ce1f296e1" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Lathoe Fabricius, 1807" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Fabricius, 1807" BINOMHYBRID="false" GENUSORUNINOMIAL="Laothoe" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Laothoe" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="4" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="5" CREATED="2008-12-10 09:56:07.0" UUID="3d0b9061-fc9d-4de5-9dc1-341e10eb139e" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Smerinthus Latreille, 1802" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Latreille, 1802" BINOMHYBRID="false" GENUSORUNINOMIAL="Smerinthus" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Smerinthus" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="5" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="6" CREATED="2008-12-10 09:56:07.0" UUID="e4d3c75c-3bfb-451e-ade1-e5e0307879dd" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Manduca Hübner, 1807" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Hübner, 1807" BINOMHYBRID="false" GENUSORUNINOMIAL="Manduca" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Manduca" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="6" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="7" CREATED="2008-12-10 09:56:07.0" UUID="9dcb7f80-05c6-4eb0-bc04-8a72353a67d7" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Lepchina Oberthür, 1904" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Oberthür, 1904" BINOMHYBRID="false" GENUSORUNINOMIAL="Lepchina" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Lepchina" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="7" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="8" CREATED="2008-12-10 09:56:07.0" UUID="f2983a50-5121-4641-a9ab-0507821b7563" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Smerinthus kindermannii Lederer, 1853" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Lederer, 1853" BINOMHYBRID="false" GENUSORUNINOMIAL="Smerinthus" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Smerinthus kindermannii" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="8" NOMENCLATURALREFERENCE_ID="1" RANK_ID="765"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="9" CREATED="2008-12-10 09:56:07.0" UUID="73a90270-16bb-43f6-b7f0-305b617c9971" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Mimas Hübner, 1819" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Hübner, 1819" BINOMHYBRID="false" GENUSORUNINOMIAL="Mimas" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Mimas" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="9" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="10" CREATED="2008-12-10 09:56:07.0" UUID="a67e7431-5c04-4cb6-b83e-c50c439561fe" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Callambulyx Rothschild & Jordan, 1903" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Rothschild & Jordan, 1903" BINOMHYBRID="false" GENUSORUNINOMIAL="Callambulyx" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Callambulyx" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="10" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="11" CREATED="2008-12-10 09:56:07.0" UUID="8e6aaf9b-5b99-4525-873b-f535d35834ac" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Dolbina Staudinger, 1877" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Staudinger, 1877" BINOMHYBRID="false" GENUSORUNINOMIAL="Dolbina" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Dolbina" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="11" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="12" CREATED="2008-12-10 09:56:07.0" UUID="c1864a99-c025-47e1-87f5-5917b905cca1" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Akbesia Rothschild & Jordan, 1903" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Rothschild & Jordan, 1903" BINOMHYBRID="false" GENUSORUNINOMIAL="Akbesia" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Akbesia" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="12" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="13" CREATED="2008-12-10 09:56:07.0" UUID="071a336b-3f31-44d8-bc81-4505dd7ca50b" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Sphinx Linnaeus, 1758" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Linnaeus, 1758" BINOMHYBRID="false" GENUSORUNINOMIAL="Sphinx" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Sphinx" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="13" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="14" CREATED="2008-12-10 09:56:07.0" UUID="e8bf37f5-5b87-43dc-8481-a2e58e4f1e71" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Agrius Hübner, 1819" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Hübner, 1819" BINOMHYBRID="false" GENUSORUNINOMIAL="Agrius" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Agrius" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="14" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="15" CREATED="2008-12-10 09:56:07.0" UUID="c2cab2ad-3e3a-47b8-8aa8-d9e1c0857647" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Acherontia Laspeyres, 1809" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Laspeyres, 1809" BINOMHYBRID="false" GENUSORUNINOMIAL="Acherontia" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Acherontia" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="15" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="16" CREATED="2008-12-10 09:56:07.0" UUID="3d54aed8-7caa-4c74-bbe2-7b946b63f39b" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Hemaris Dalman, 1816" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Dalman, 1816" BINOMHYBRID="false" GENUSORUNINOMIAL="Hemaris" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Hemaris" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="16" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="17" CREATED="2008-12-10 09:56:07.0" UUID="2a91a640-ab5a-4993-a58d-a07c0f2ecba3" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Proserpinus Hübner, 1819" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Hübner, 1819" BINOMHYBRID="false" GENUSORUNINOMIAL="Proserpinus" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Proserpinus" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="17" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="18" CREATED="2008-12-10 09:56:07.0" UUID="3f159abb-55fa-4c62-966d-3ff1ebc7b34b" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Sphingonaepiopsis Wallengren, 1858" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Wallengren, 1858" BINOMHYBRID="false" GENUSORUNINOMIAL="Sphingonaepiopsis" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Sphingonaepiopsis" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="18" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="19" CREATED="2008-12-10 09:56:07.0" UUID="2910ccab-35ea-45bb-ba1a-e8bceed11bd2" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Rethera Rothschild & Jordan, 1903" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Rothschild & Jordan, 1903" BINOMHYBRID="false" GENUSORUNINOMIAL="Rethera" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Rethera" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="19" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="20" CREATED="2008-12-10 09:56:07.0" UUID="e400203b-9b0f-4bc7-8aea-9f060de276de" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Daphnis Hübner, 1819" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Hübner, 1819" BINOMHYBRID="false" GENUSORUNINOMIAL="Daphnis" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Daphnis" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="20" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="21" CREATED="2008-12-10 09:56:07.0" UUID="66354004-1ae2-4aa0-b4d6-d2c6c15a2fb5" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Clarina Tutt, 1903" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Tutt, 1903" BINOMHYBRID="false" GENUSORUNINOMIAL="Clarina" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Clarina" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="21" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="22" CREATED="2008-12-10 09:56:07.0" UUID="f57b8d58-e89d-40ea-9d5b-a2cf96d017eb" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Acosmeryx Boisduval, 1875" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Boisduval, 1875" BINOMHYBRID="false" GENUSORUNINOMIAL="Acosmeryx" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Acosmeryx" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="22" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="23" CREATED="2008-12-10 09:56:07.0" UUID="c3007d9a-3a7c-4cb1-9818-f4f529e760a4" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Macroglossum Scopoli, 1777" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Scopoli, 1777" BINOMHYBRID="false" GENUSORUNINOMIAL="Macroglossum" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Macroglossum" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="23" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="24" CREATED="2008-12-10 09:56:07.0" UUID="aa3dadc8-dc13-4e35-86cd-fd3ca2e796ca" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Hyles Hübner, 1819" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Hübner, 1819" BINOMHYBRID="false" GENUSORUNINOMIAL="Hyles" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Hyles" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="24" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="25" CREATED="2008-12-10 09:56:07.0" UUID="1673213d-60b4-4770-a8e9-509882340d0a" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Deilephila Laspeyres, 1809" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Laspeyres, 1809" BINOMHYBRID="false" GENUSORUNINOMIAL="Deilephila" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Deilephila" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="25" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="26" CREATED="2008-12-10 09:56:07.0" UUID="862897cc-a3e6-436d-899a-96f82d02b4a2" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Hippotion Hübner, 1819" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Hübner, 1819" BINOMHYBRID="false" GENUSORUNINOMIAL="Hippotion" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Hippotion" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="26" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="27" CREATED="2008-12-10 09:56:07.0" UUID="27004fcc-14d4-47d4-a3e1-75750fdb5b79" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Atropos Agassiz, 1846" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Agassiz, 1846" BINOMHYBRID="false" GENUSORUNINOMIAL="Atropos" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Atropos" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="27" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="28" CREATED="2008-12-10 09:56:07.0" UUID="748ccb21-f3a4-4f32-a514-53931965ca2d" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Atropos Oken, 1815" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Oken, 1815" BINOMHYBRID="false" GENUSORUNINOMIAL="Atropos" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Atropos" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="28" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="29" CREATED="2008-12-10 09:56:07.0" UUID="ea1f496a-c4cc-49e4-96d6-f46d58d23297" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Brachyglossa Boisduval, 1828" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Boisduval, 1828" BINOMHYBRID="false" GENUSORUNINOMIAL="Brachyglossa" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Brachyglossa" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="29" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="30" CREATED="2008-12-10 09:56:07.0" UUID="238e41b3-9f4f-44b7-8cf5-28090febe9bb" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Manduca Hübner, 1806" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Hübner, 1806" BINOMHYBRID="false" GENUSORUNINOMIAL="Manduca" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Manduca" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="30" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="31" CREATED="2008-12-10 09:56:07.0" UUID="feda2055-292c-4391-86b7-06bfdab77472" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Atropos Leach, 1815" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Leach, 1815" BINOMHYBRID="false" GENUSORUNINOMIAL="Atropos" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Atropos" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="15" NOMENCLATURALREFERENCE_ID="1" RANK_ID="774"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="32" CREATED="2008-12-10 09:56:07.0" UUID="9faf43ed-2003-4bc2-9dfd-61c71eaa3829" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Manduca afflicta (Grote, 1865)" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="(Grote, 1865)" BINOMHYBRID="false" GENUSORUNINOMIAL="Manduca" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Manduca afflicta" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="31" NOMENCLATURALREFERENCE_ID="1" RANK_ID="765"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="33" CREATED="2008-12-10 09:56:07.0" UUID="666ecfcd-9ee3-41d4-8c47-7cb692cb7f27" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Manduca bergarmatipes (Clark, 1927)" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="(Clark, 1927)" BINOMHYBRID="false" GENUSORUNINOMIAL="Manduca" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Manduca bergarmatipes" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="32" NOMENCLATURALREFERENCE_ID="1" RANK_ID="765"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="34" CREATED="2008-12-10 09:56:07.0" UUID="31b6c4fb-fcd1-4ce7-b26a-2ca15a6c8ac5" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Manduca chinchilla (Gehlen, 1942)" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="(Gehlen, 1942)" BINOMHYBRID="false" GENUSORUNINOMIAL="Manduca" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Manduca chinchilla" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="33" NOMENCLATURALREFERENCE_ID="1" RANK_ID="765"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="35" CREATED="2008-12-10 09:56:07.0" UUID="866278ea-0a6a-4308-acb3-e7e22624e5ea" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Acherontia atropos (Linnaeus, 1758)" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="(Linnaeus, 1758)" BINOMHYBRID="false" GENUSORUNINOMIAL="Acherontia" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Acherontia atropos" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="34" NOMENCLATURALREFERENCE_ID="1" RANK_ID="765"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="36" CREATED="2008-12-10 09:56:07.0" UUID="7969821b-a2cf-4d01-95ec-6a5ed0ca3f69" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Acherontia lachesis (Fabricius, 1798)" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="(Fabricius, 1798)" BINOMHYBRID="false" GENUSORUNINOMIAL="Acherontia" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Acherontia lachesis" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="35" NOMENCLATURALREFERENCE_ID="1" RANK_ID="765"/>
+ <TAXONNAMEBASE DTYPE="ZoologicalName" ID="37" CREATED="2008-12-10 09:56:07.0" UUID="61b1dcae-8aa6-478a-bcd6-080cf0eb6ad7" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Acherontia styx Westwood, 1847" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="Westwood, 1847" BINOMHYBRID="false" GENUSORUNINOMIAL="Acherontia" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Acherontia styx" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="36" NOMENCLATURALREFERENCE_ID="1" RANK_ID="765"/>
+ <TAXONNAMEBASE DTYPE="BotanicalName" ID="38" CREATED="2008-12-10 09:56:07.0" UUID="c9e7124b-2e60-4df2-996c-b7d024c85d33" UPDATED="2008-12-10 09:56:07.238" PROTECTEDTITLECACHE="true" TITLECACHE="Cryptocoryne x purpurea nothovar borneoensis N.Jacobsen, Bastm. & Yuji Sasaki" FULLTITLECACHE="" PARSINGPROBLEM="0" PROBLEMENDS="-1" PROBLEMSTARTS="-1" PROTECTEDFULLTITLECACHE="true" AUTHORSHIPCACHE="N.Jacobsen, Bastm. & Yuji Sasaki" BINOMHYBRID="true" GENUSORUNINOMIAL="Cryptocoryne" HYBRIDFORMULA="false" MONOMHYBRID="false" NAMECACHE="Cryptocoryne x purpurea nothovar borneoensis" PROTECTEDAUTHORSHIPCACHE="true" PROTECTEDNAMECACHE="true" TRINOMHYBRID="false" ANAMORPHIC="false" HOMOTYPICALGROUP_ID="37" NOMENCLATURALREFERENCE_ID="1" RANK_ID="761"/>
+
+ <AUDITEVENT/>
+ <HIBERNATE_SEQUENCES SEQUENCE_NAME="DescriptionBase" NEXT_VAL="35"/>
+ <HIBERNATE_SEQUENCES SEQUENCE_NAME="TaxonBase" NEXT_VAL="39"/>
+
</dataset>
\ No newline at end of file
+++ /dev/null
-<?xml version='1.0' encoding='UTF-8'?>\r
-<dataset xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"\r
- xsi:noNamespaceSchemaLocation="../dataset.xsd">\r
-\r
- <!-- ********************* -->\r
- <!-- Validation results -->\r
- <!-- ********************* -->\r
- <ENTITYVALIDATION ID="1" CREATED="2014-01-01 00:00:00.0"\r
- UUID="dae5b090-30e8-45bc-9460-2eb2028d3c18" VALIDATEDENTITYID="100"\r
- VALIDATEDENTITYUUID="f8de74c6-aa56-4de3-931e-87b61da0218c"\r
- VALIDATEDENTITYCLASS="eu.etaxonomy.cdm.persistence.validation.Employee"\r
- VALIDATIONCOUNT="6"\r
- />\r
-\r
- <!-- ********************* -->\r
- <!-- Constraint violations -->\r
- <!-- ********************* -->\r
- <ENTITYCONSTRAINTVIOLATION ID="38"\r
- CREATED="2014-01-01 00:00:00.0" UUID="358da71f-b646-4b79-b00e-dcb68b6425ba"\r
- ENTITYVALIDATION_ID="1" PROPERTYPATH="firstName" INVALIDVALUE="Bar"\r
- SEVERITY="Error" MESSAGE="Garbage In Garbage Out"\r
- VALIDATIONGROUP="eu.etaxonomy.cdm.validation.Level2"\r
- VALIDATOR="eu.etaxonomy.cdm.persistence.validation.GarbageValidator" />\r
-\r
-</dataset>
\ No newline at end of file
+++ /dev/null
-<?xml version='1.0' encoding='UTF-8'?>\r
-<dataset xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"\r
- xsi:noNamespaceSchemaLocation="../dataset.xsd">\r
-\r
- <!-- ********************* -->\r
- <!-- Validation results -->\r
- <!-- ********************* -->\r
- <ENTITYVALIDATION ID="1" CREATED="2014-01-01 00:00:00.0"\r
- UUID="dae5b090-30e8-45bc-9460-2eb2028d3c18" VALIDATEDENTITYID="100"\r
- VALIDATEDENTITYUUID="f8de74c6-aa56-4de3-931e-87b61da0218c"\r
- VALIDATEDENTITYCLASS="eu.etaxonomy.cdm.persistence.validation.Employee"\r
- VALIDATIONCOUNT="5"\r
- />\r
-\r
- <!-- ********************* -->\r
- <!-- Constraint violations -->\r
- <!-- ********************* -->\r
- <ENTITYCONSTRAINTVIOLATION ID="1"\r
- CREATED="2014-01-01 00:00:00.0" UUID="358da71f-b646-4b79-b00e-dcb68b6425ba"\r
- ENTITYVALIDATION_ID="1" PROPERTYPATH="firstName" INVALIDVALUE="Foo"\r
- SEVERITY="Error" MESSAGE="Garbage In Garbage Out"\r
- VALIDATIONGROUP="eu.etaxonomy.cdm.validation.Level2"\r
- VALIDATOR="eu.etaxonomy.cdm.persistence.validation.GarbageValidator" />\r
-\r
-</dataset>
\ No newline at end of file
+++ /dev/null
-<?xml version='1.0' encoding='UTF-8'?>\r
-<dataset xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"\r
- xsi:noNamespaceSchemaLocation="../dataset.xsd">\r
-\r
- <!-- ********************* -->\r
- <!-- Validation results -->\r
- <!-- ********************* -->\r
- <ENTITYVALIDATION ID="1" CREATED="2014-01-01 00:00:00.0"\r
- UUID="dae5b090-30e8-45bc-9460-2eb2028d3c18" VALIDATEDENTITYID="100"\r
- VALIDATEDENTITYUUID="f8de74c6-aa56-4de3-931e-87b61da0218c"\r
- VALIDATEDENTITYCLASS="eu.etaxonomy.cdm.persistence.validation.Employee"\r
- VALIDATIONCOUNT="6"\r
- />\r
-\r
- <!-- ********************* -->\r
- <!-- Constraint violations -->\r
- <!-- ********************* -->\r
- <ENTITYCONSTRAINTVIOLATION ID="1"\r
- CREATED="2014-01-01 00:00:00.0" UUID="358da71f-b646-4b79-b00e-dcb68b6425ba"\r
- ENTITYVALIDATION_ID="1" PROPERTYPATH="firstName" INVALIDVALUE="Foo"\r
- SEVERITY="Error" MESSAGE="Garbage In Garbage Out"\r
- VALIDATIONGROUP="eu.etaxonomy.cdm.validation.Level2"\r
- VALIDATOR="eu.etaxonomy.cdm.persistence.validation.GarbageValidator" />\r
-\r
-</dataset>
\ No newline at end of file
database.schemaNames=PUBLIC\r
org.dbunit.dataset.datatype.IDataTypeFactory.implClassName.h2=org.dbunit.ext.h2.H2DataTypeFactory\r
#org.dbunit.dataset.datatype.IDataTypeFactory.implClassName=org.dbunit.ext.h2.H2DataTypeFactory\r
-org.unitils.core.dbsupport.DbSupport.implClassName.h2=eu.etaxonomy.cdm.database.H2DbSupport\r
+#maybe we can replace this by org.unitils.core.dbsupport.H2DbSupport\r
+org.unitils.core.dbsupport.DbSupport.implClassName.h2=eu.etaxonomy.cdm.database.H2DbSupport \r
database.storedIndentifierCase.h2=auto\r
database.identifierQuoteString.h2=auto\r
\r
#\r
HibernateModule.configuration.implClassName=org.hibernate.cfg.Configuration\r
\r
+## Same as defined in hibernate.cfg.xml.\r
+hibernate.connection.provider_class=org.hibernate.engine.jdbc.connections.internal.DatasourceConnectionProviderImpl\r
+\r
+#Created for hibernate 5 support, not yet available in standard unitils \r
+unitils.module.hibernate.className=org.unitils.orm.hibernate.Hibernate5Module\r
+\r
+#Since Hibernate 5 (or earlier > 4.1.10) using the Proxy does not correctly release \r
+#the connections to the pool after transaction finishes.\r
+dataSource.wrapInTransactionalProxy=false\r
<groupId>eu.etaxonomy</groupId>
<artifactId>cdmlib-persistence</artifactId>
</dependency>
- <dependency>
+<!-- <dependency> -->
+<!-- <groupId>eu.etaxonomy</groupId> -->
+<!-- <artifactId>cdmlib-test</artifactId> -->
+<!-- <scope>test</scope> -->
+<!-- </dependency> -->
+ <dependency>
<groupId>eu.etaxonomy</groupId>
<artifactId>cdmlib-persistence</artifactId>
<type>test-jar</type>
<scope>test</scope>
<version>${project.version}</version>
- </dependency>
+ </dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-highlighter</artifactId>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-join</artifactId>
</dependency>
- <!-- OLD concept, not under development anymore,
- to be removed as soon as CdmMassIndexer is updated -->
<dependency>
- <groupId>org.apache.lucene</groupId>
- <artifactId>lucene-spellchecker</artifactId>
+ <groupId>org.apache.lucene</groupId>
+ <artifactId>lucene-suggest</artifactId>
</dependency>
-
<dependency>
<groupId>org.unitils</groupId>
<artifactId>unitils-dbunit</artifactId>
import org.springframework.beans.factory.annotation.Autowired;\r
import org.springframework.context.ApplicationContext;\r
import org.springframework.context.ApplicationContextAware;\r
-import org.springframework.orm.hibernate4.HibernateTransactionManager;\r
+import org.springframework.orm.hibernate5.HibernateTransactionManager;\r
import org.springframework.security.authentication.ProviderManager;\r
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;\r
import org.springframework.security.core.Authentication;\r
logger.debug("Isolation level = " + txDef.getIsolationLevel());\r
logger.debug("Timeout = " + txDef.getTimeout());\r
logger.debug("Read Only = " + txDef.isReadOnly());\r
- // org.springframework.orm.hibernate4.HibernateTransactionManager\r
+ // org.springframework.orm.hibernate5.HibernateTransactionManager\r
// provides more transaction/session-related debug information.\r
}\r
\r
import org.hibernate.SessionFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.datasource.ConnectionHolder;
-import org.springframework.orm.hibernate4.SessionHolder;
+import org.springframework.orm.hibernate5.SessionHolder;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionStatus;
* <p>
* This behaviour essentially revolves around the resources map in the {@link org.springframework.transaction.support.TransactionSynchronizationManager TransactionSynchronizationManager}.
* This resources map contains two entries of interest,
- * - (Autowired) {@link org.hibernate.SessionFactory} mapped to the {@link org.springframework.orm.hibernate4.SessionHolder}
+ * - (Autowired) {@link org.hibernate.SessionFactory} mapped to the {@link org.springframework.orm.hibernate5.SessionHolder}
* - (Autowired) {@link javax.sql.DataSource} mapped to the {@link org.springframework.jdbc.datasource.ConnectionHolder}
* <p>
 * The SessionHolder object itself contains the {@link org.hibernate.Session Session} as well as the {@link org.hibernate.Transaction Transaction} object.
* objects must not be null and the corresponding holders must have their 'synchronizedWithTransaction' flag set to true.
* <p>
* The default behaviour of the {@link org.springframework.transaction.PlatformTransactionManager PlatformTransactionManager} which in the CDM case is autowired
- * to {@link org.springframework.orm.hibernate4.HibernateTransactionManager HibernateTransactionManager}, is to check these entries
+ * to {@link org.springframework.orm.hibernate5.HibernateTransactionManager HibernateTransactionManager}, is to check these entries
* when starting a transaction. If this entries do not exist in the resource map then they are created, implying a new session, which
* is in fact how hibernate implements the default 'session-per-request' pattern internally.
* <p>
-package eu.etaxonomy.cdm.api.conversation;\r
-\r
-import javax.sql.DataSource;\r
-\r
-import org.hibernate.LockMode;\r
-import org.hibernate.Session;\r
-import org.hibernate.SessionFactory;\r
-import org.springframework.orm.hibernate4.SessionHolder;\r
-import org.springframework.transaction.TransactionDefinition;\r
-import org.springframework.transaction.TransactionStatus;\r
-\r
-public class ConversationHolderMock extends ConversationHolder {\r
-\r
-\r
- public ConversationHolderMock() {\r
-\r
- }\r
- /**\r
- * This method has to be called when starting a new unit-of-work. All required resources are\r
- * bound so that SessionFactory.getCurrentSession() returns the right session for this conversation\r
- */\r
- @Override\r
- public void bind() {\r
-\r
- }\r
-\r
- @Override\r
- public SessionHolder getSessionHolder(){\r
- return null;\r
- }\r
-\r
- /**\r
- * @return\r
- */\r
- private DataSource getDataSource() {\r
- return null;\r
- }\r
-\r
- /**\r
- * @return true if this longSession is bound to the session factory.\r
- */\r
- @Override\r
- public boolean isBound(){\r
- return false;\r
- }\r
-\r
- /**\r
- * Creates an instance of TransactionStatus and binds it to this conversation manager.\r
- * At the moment we allow only on transaction per conversation holder.\r
- *\r
- * @return the transaction status bound to this conversation holder\r
- */\r
- @Override\r
- public TransactionStatus startTransaction(){\r
- return null;\r
- }\r
-\r
- /**\r
- * @return if there is a running transaction\r
- */\r
- @Override\r
- public boolean isTransactionActive(){\r
- return false;\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.hibernate.Session#evict(java.lang.Object object)\r
- */\r
- @Override\r
- public void evict(Object object){\r
-\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.hibernate.Session#refresh(java.lang.Object object)\r
- */\r
- @Override\r
- public void refresh(Object object){\r
-\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see org.hibernate.Session#clear()\r
- */\r
- @Override\r
- public void clear(){\r
-\r
- }\r
-\r
- /**\r
- * Commit the running transaction.\r
- */\r
- @Override\r
- public void commit(){\r
-\r
- }\r
-\r
- /**\r
- * Commit the running transaction but optionally start a\r
- * new one right away.\r
- *\r
- * @param restartTransaction whether to start a new transaction\r
- */\r
- @Override\r
- public TransactionStatus commit(boolean restartTransaction){\r
- return null;\r
- }\r
-\r
- /**\r
- * @return the session associated with this conversation manager\r
- */\r
- @Override\r
- public Session getSession() {\r
- return null;\r
- }\r
-\r
- /**\r
- * @return the session factory that is bound to this conversation manager\r
- */\r
- @Override\r
- public SessionFactory getSessionFactory() {\r
- return null;\r
- }\r
-\r
- @Override\r
- public void delete(Object object){\r
-\r
- }\r
-\r
- /**\r
- * Facades Session.lock()\r
- */\r
- @Override\r
- public void lock(Object persistentObject, LockMode lockMode) {\r
-\r
- }\r
-\r
- @Override\r
- public void lock(String entityName, Object persistentObject, LockMode lockMode){\r
-\r
- }\r
-\r
- /**\r
- * @return the definition\r
- */\r
- @Override\r
- public TransactionDefinition getDefinition() {\r
- return null;\r
- }\r
-\r
- /**\r
- * @param definition the definition to set\r
- */\r
- @Override\r
- public void setDefinition(TransactionDefinition definition) {\r
-\r
- }\r
-\r
- /**\r
- * Register to get updated after any interaction with the datastore\r
- */\r
- @Override\r
- public void registerForDataStoreChanges(IConversationEnabled observer) {\r
-\r
- }\r
-\r
- /**\r
- * Register to get updated after any interaction with the datastore\r
- */\r
- @Override\r
- public void unregisterForDataStoreChanges(IConversationEnabled observer) {\r
-\r
- }\r
-\r
- /**\r
- * Free resources bound to this conversationHolder\r
- */\r
- @Override\r
- public void close(){\r
-\r
- }\r
-\r
- @Override\r
- public boolean isClosed(){\r
- return true;\r
- }\r
-\r
-}\r
+package eu.etaxonomy.cdm.api.conversation;
+
+import javax.sql.DataSource;
+
+import org.hibernate.LockMode;
+import org.hibernate.Session;
+import org.hibernate.SessionFactory;
+import org.springframework.orm.hibernate5.SessionHolder;
+import org.springframework.transaction.TransactionDefinition;
+import org.springframework.transaction.TransactionStatus;
+
+public class ConversationHolderMock extends ConversationHolder {
+
+
+ public ConversationHolderMock() {
+
+ }
+ /**
+ * This method has to be called when starting a new unit-of-work. All required resources are
+ * bound so that SessionFactory.getCurrentSession() returns the right session for this conversation
+ */
+ @Override
+ public void bind() {
+
+ }
+
+ @Override
+ public SessionHolder getSessionHolder(){
+ return null;
+ }
+
+ /**
+ * @return
+ */
+ private DataSource getDataSource() {
+ return null;
+ }
+
+ /**
+ * @return true if this longSession is bound to the session factory.
+ */
+ @Override
+ public boolean isBound(){
+ return false;
+ }
+
+ /**
+ * Creates an instance of TransactionStatus and binds it to this conversation manager.
+ * At the moment we allow only one transaction per conversation holder.
+ *
+ * @return the transaction status bound to this conversation holder
+ */
+ @Override
+ public TransactionStatus startTransaction(){
+ return null;
+ }
+
+ /**
+ * @return if there is a running transaction
+ */
+ @Override
+ public boolean isTransactionActive(){
+ return false;
+ }
+
+ /* (non-Javadoc)
+ * @see org.hibernate.Session#evict(java.lang.Object object)
+ */
+ @Override
+ public void evict(Object object){
+
+ }
+
+ /* (non-Javadoc)
+ * @see org.hibernate.Session#refresh(java.lang.Object object)
+ */
+ @Override
+ public void refresh(Object object){
+
+ }
+
+ /* (non-Javadoc)
+ * @see org.hibernate.Session#clear()
+ */
+ @Override
+ public void clear(){
+
+ }
+
+ /**
+ * Commit the running transaction.
+ */
+ @Override
+ public void commit(){
+
+ }
+
+ /**
+ * Commit the running transaction but optionally start a
+ * new one right away.
+ *
+ * @param restartTransaction whether to start a new transaction
+ */
+ @Override
+ public TransactionStatus commit(boolean restartTransaction){
+ return null;
+ }
+
+ /**
+ * @return the session associated with this conversation manager
+ */
+ @Override
+ public Session getSession() {
+ return null;
+ }
+
+ /**
+ * @return the session factory that is bound to this conversation manager
+ */
+ @Override
+ public SessionFactory getSessionFactory() {
+ return null;
+ }
+
+ @Override
+ public void delete(Object object){
+
+ }
+
+ /**
+ * Facades Session.lock()
+ */
+ @Override
+ public void lock(Object persistentObject, LockMode lockMode) {
+
+ }
+
+ @Override
+ public void lock(String entityName, Object persistentObject, LockMode lockMode){
+
+ }
+
+ /**
+ * @return the definition
+ */
+ @Override
+ public TransactionDefinition getDefinition() {
+ return null;
+ }
+
+ /**
+ * @param definition the definition to set
+ */
+ @Override
+ public void setDefinition(TransactionDefinition definition) {
+
+ }
+
+ /**
+ * Register to get updated after any interaction with the datastore
+ */
+ @Override
+ public void registerForDataStoreChanges(IConversationEnabled observer) {
+
+ }
+
+ /**
+ * Register to get updated after any interaction with the datastore
+ */
+ @Override
+ public void unregisterForDataStoreChanges(IConversationEnabled observer) {
+
+ }
+
+ /**
+ * Free resources bound to this conversationHolder
+ */
+ @Override
+ public void close(){
+
+ }
+
+ @Override
+ public boolean isClosed(){
+ return true;
+ }
+
+}
-// $Id$\r
-/**\r
-* Copyright (C) 2007 EDIT\r
-* European Distributed Institute of Taxonomy \r
-* http://www.e-taxonomy.eu\r
-* \r
-* The contents of this file are subject to the Mozilla Public License Version 1.1\r
-* See LICENSE.TXT at the top of this package for the full license terms.\r
-*/\r
-\r
-package eu.etaxonomy.cdm.api.service;\r
-\r
-import java.sql.Connection;\r
-import java.sql.ResultSet;\r
-import java.sql.SQLException;\r
-import java.sql.Statement;\r
-import java.util.HashMap;\r
-import java.util.Map;\r
-\r
-import javax.sql.DataSource;\r
-\r
-import org.apache.log4j.Logger;\r
-import org.hibernate.SessionFactory;\r
-import org.springframework.beans.BeansException;\r
-import org.springframework.beans.factory.annotation.Autowired;\r
-import org.springframework.context.ApplicationContext;\r
-import org.springframework.context.ApplicationContextAware;\r
-import org.springframework.jdbc.datasource.AbstractDriverBasedDataSource;\r
-import org.springframework.orm.hibernate4.SessionFactoryUtils;\r
-import org.springframework.stereotype.Service;\r
-import org.springframework.transaction.annotation.Transactional;\r
-\r
-import eu.etaxonomy.cdm.api.application.CdmApplicationController;\r
-import eu.etaxonomy.cdm.config.CdmPersistentSourceUtils;\r
-import eu.etaxonomy.cdm.config.CdmSourceException;\r
-import eu.etaxonomy.cdm.database.CdmDataSource;\r
-import eu.etaxonomy.cdm.database.CdmPersistentDataSource;\r
-import eu.etaxonomy.cdm.database.DataSourceNotFoundException;\r
-import eu.etaxonomy.cdm.database.DatabaseTypeEnum;\r
-import eu.etaxonomy.cdm.database.H2Mode;\r
-import eu.etaxonomy.cdm.database.ICdmDataSource;\r
-import eu.etaxonomy.cdm.model.common.init.TermNotFoundException;\r
-import eu.etaxonomy.cdm.model.metadata.CdmMetaData.MetaDataPropertyName;\r
-import eu.etaxonomy.cdm.model.name.NomenclaturalCode;\r
-\r
-\r
-\r
-/**\r
- * Implementation of service which provides functionality to directly access database \r
- * related information.\r
- * \r
- * @author a.mueller\r
- *\r
- */\r
-@Service\r
-@Transactional(readOnly = true)\r
-public class DatabaseServiceHibernateImpl implements IDatabaseService, ApplicationContextAware {\r
- private static final Logger logger = Logger.getLogger(DatabaseServiceHibernateImpl.class);\r
- \r
- private static final String TMP_DATASOURCE = "tmp"; \r
- \r
- @Autowired\r
- private SessionFactory factory;\r
- \r
- @Autowired\r
- protected ApplicationContext appContext;\r
- \r
- private CdmApplicationController application;\r
- \r
-\r
- \r
- \r
- /* (non-Javadoc)\r
- * @see eu.etaxonomy.cdm.api.service.IDatabaseService#setApplicationController(eu.etaxonomy.cdm.api.application.CdmApplicationController)\r
- */\r
- public void setApplicationController(CdmApplicationController cdmApplicationController){\r
- this.application = cdmApplicationController;\r
- }\r
- \r
- \r
- /* (non-Javadoc)\r
- * @see eu.etaxonomy.cdm.api.service.IDatabaseService#connectToDatasource(eu.etaxonomy.cdm.database.CdmDataSource)\r
- */\r
- public boolean connectToDatasource(CdmPersistentDataSource dataSource) throws TermNotFoundException{\r
- this.application.changeDataSource(dataSource);\r
- logger.debug("DataSource changed to " + dataSource.getName());\r
- return true;\r
- }\r
- \r
- /* (non-Javadoc)\r
- * @see eu.etaxonomy.cdm.api.service.IDatabaseService#connectToDatabase(eu.etaxonomy.cdm.database.DatabaseTypeEnum, java.lang.String, java.lang.String, java.lang.String, java.lang.String, int)\r
- */\r
- public boolean connectToDatabase(DatabaseTypeEnum databaseTypeEnum, String server,\r
- String database, String username, String password, int port, String filePath, H2Mode mode, NomenclaturalCode code) throws TermNotFoundException {\r
- ICdmDataSource dataSource = CdmDataSource.NewInstance(databaseTypeEnum, server, database, port, username, password, code);\r
- CdmPersistentDataSource tmpDataSource = saveDataSource(TMP_DATASOURCE, dataSource);\r
- boolean result = connectToDatasource(tmpDataSource);\r
- CdmPersistentSourceUtils.delete(tmpDataSource);\r
- return result;\r
- }\r
-\r
-\r
- /* (non-Javadoc)\r
- * @see eu.etaxonomy.cdm.api.service.IDatabaseService#connectToDatabase(eu.etaxonomy.cdm.database.DatabaseTypeEnum, java.lang.String, java.lang.String, java.lang.String, java.lang.String)\r
- */\r
- public boolean connectToDatabase(DatabaseTypeEnum databaseTypeEnum, String server,\r
- String database, String username, String password) throws TermNotFoundException {\r
- return connectToDatabase(databaseTypeEnum, server, database, username, password, databaseTypeEnum.getDefaultPort(), null, null, null) ;\r
- }\r
- \r
- /* (non-Javadoc)\r
- * @see eu.etaxonomy.cdm.api.service.IDatabaseService#saveDataSource(java.lang.String, eu.etaxonomy.cdm.database.ICdmDataSource)\r
- */\r
- public CdmPersistentDataSource saveDataSource(String strDataSourceName,\r
- ICdmDataSource dataSource) {\r
- return CdmPersistentDataSource.save(strDataSourceName, dataSource);\r
- }\r
- \r
- /* (non-Javadoc)\r
- * @see eu.etaxonomy.cdm.api.service.IDatabaseService#updateDataSource(java.lang.String, eu.etaxonomy.cdm.database.CdmPersistentDataSource)\r
- */\r
- public CdmPersistentDataSource updateDataSource(String strDataSourceName,\r
- CdmPersistentDataSource dataSource) throws DataSourceNotFoundException {\r
- return CdmPersistentDataSource.update(strDataSourceName, dataSource);\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see eu.etaxonomy.cdm.api.service.IDatabaseService#getUrl()\r
- */\r
- public String getUrl() {\r
- return getDataSource().getUrl();\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see eu.etaxonomy.cdm.api.service.IDatabaseService#getUsername()\r
- */\r
- public String getUsername() {\r
- return getDataSource().getUsername();\r
- }\r
-\r
- /**\r
- * Returns the AbstractDriverBasedDataSource from hibernate,\r
- * generalized in order to also allow using SimpleDriverDataSource.\r
- * \r
- * @return the AbstractDriverBasedDataSource from the hibernate layer \r
- */\r
- private AbstractDriverBasedDataSource getDataSource(){\r
- AbstractDriverBasedDataSource ds = (AbstractDriverBasedDataSource)SessionFactoryUtils.getDataSource(factory);\r
- return ds;\r
- }\r
-\r
-\r
- /* (non-Javadoc)\r
- * @see org.springframework.context.ApplicationContextAware#setApplicationContext(org.springframework.context.ApplicationContext)\r
- */\r
- public void setApplicationContext(ApplicationContext applicationContext)\r
- throws BeansException {\r
- this.appContext = applicationContext;\r
- }\r
- \r
- \r
- \r
- /* (non-Javadoc)\r
- * @see eu.etaxonomy.cdm.api.service.IDatabaseService#getDbSchemaVersion()\r
- */\r
- @Override\r
- public String getDbSchemaVersion() throws CdmSourceException { \r
- try {\r
- return (String)getSingleValue(MetaDataPropertyName.DB_SCHEMA_VERSION.getSqlQuery());\r
- } catch (SQLException e) {\r
- throw new CdmSourceException(e.getMessage()); \r
- }\r
- }\r
- \r
- \r
- /* (non-Javadoc)\r
- * @see eu.etaxonomy.cdm.api.service.IDatabaseService#isDbEmpty()\r
- */\r
- @Override\r
- public boolean isDbEmpty() throws CdmSourceException {\r
- // Any CDM DB should have a schema version\r
- String dbSchemaVersion = (String) getDbSchemaVersion(); \r
- return (dbSchemaVersion == null || dbSchemaVersion.equals(""));\r
- }\r
- \r
- /**\r
- * Execute a SQL query which returns a single value\r
- * \r
- * @param query , which returns a single value\r
- * @return\r
- * @throws SQLException\r
- */\r
- private Object getSingleValue(String query) throws SQLException {\r
- String queryString = query == null? "(null)": query;\r
- //ResultSet resultSet = executeQuery(query);\r
- ResultSet resultSet = null;\r
- \r
- Connection connection = SessionFactoryUtils.getDataSource(factory).getConnection();\r
- if (connection != null){\r
- \r
- Statement statement = connection.createStatement();\r
- resultSet = statement.executeQuery(query); \r
- \r
- if (resultSet == null || resultSet.next() == false){\r
- logger.info("No record returned for query " + queryString);\r
- return null;\r
- }\r
- if (resultSet.getMetaData().getColumnCount() != 1){\r
- logger.info("More than one column selected in query" + queryString);\r
- //first value will be taken\r
- }\r
- Object object = resultSet.getObject(1);\r
- if (resultSet.next()){\r
- logger.info("Multiple results for query " + queryString);\r
- //first row will be taken\r
- }\r
- // making sure we close all resources so we don't run out of\r
- // connections in the connection pool\r
- resultSet.close();\r
- statement.close();\r
- connection.close();\r
- \r
- return object;\r
- }else{\r
- throw new RuntimeException("Could not establish connection to database");\r
- }\r
-\r
- }\r
-\r
-\r
- @Override\r
- public Map<MetaDataPropertyName, String> getCdmMetadataMap() throws CdmSourceException {\r
- Map<MetaDataPropertyName, String> cdmMetaDataMap = new HashMap<MetaDataPropertyName, String>();\r
- \r
- for(MetaDataPropertyName mdpn : MetaDataPropertyName.values()){\r
- String value = null;\r
- try {\r
- value = (String)getSingleValue(mdpn.getSqlQuery());\r
- } catch (SQLException e) {\r
- throw new CdmSourceException(e.getMessage());\r
- } \r
- if(value != null) {\r
- cdmMetaDataMap.put(mdpn, value);\r
- }\r
- }\r
- return cdmMetaDataMap;\r
- }\r
-\r
-}\r
+// $Id$
+/**
+* Copyright (C) 2007 EDIT
+* European Distributed Institute of Taxonomy
+* http://www.e-taxonomy.eu
+*
+* The contents of this file are subject to the Mozilla Public License Version 1.1
+* See LICENSE.TXT at the top of this package for the full license terms.
+*/
+
+package eu.etaxonomy.cdm.api.service;
+
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.log4j.Logger;
+import org.hibernate.SessionFactory;
+import org.springframework.beans.BeansException;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.ApplicationContextAware;
+import org.springframework.jdbc.datasource.AbstractDriverBasedDataSource;
+import org.springframework.orm.hibernate5.SessionFactoryUtils;
+import org.springframework.stereotype.Service;
+import org.springframework.transaction.annotation.Transactional;
+
+import eu.etaxonomy.cdm.api.application.CdmApplicationController;
+import eu.etaxonomy.cdm.config.CdmPersistentSourceUtils;
+import eu.etaxonomy.cdm.config.CdmSourceException;
+import eu.etaxonomy.cdm.database.CdmDataSource;
+import eu.etaxonomy.cdm.database.CdmPersistentDataSource;
+import eu.etaxonomy.cdm.database.DataSourceNotFoundException;
+import eu.etaxonomy.cdm.database.DatabaseTypeEnum;
+import eu.etaxonomy.cdm.database.H2Mode;
+import eu.etaxonomy.cdm.database.ICdmDataSource;
+import eu.etaxonomy.cdm.model.common.init.TermNotFoundException;
+import eu.etaxonomy.cdm.model.metadata.CdmMetaData.MetaDataPropertyName;
+import eu.etaxonomy.cdm.model.name.NomenclaturalCode;
+
+
+
+/**
+ * Implementation of service which provides functionality to directly access database
+ * related information.
+ *
+ * @author a.mueller
+ *
+ */
+@Service
+@Transactional(readOnly = true)
+public class DatabaseServiceHibernateImpl implements IDatabaseService, ApplicationContextAware {
+ private static final Logger logger = Logger.getLogger(DatabaseServiceHibernateImpl.class);
+
+ private static final String TMP_DATASOURCE = "tmp";
+
+ @Autowired
+ private SessionFactory factory;
+
+ @Autowired
+ protected ApplicationContext appContext;
+
+ private CdmApplicationController application;
+
+
+
+
+ /* (non-Javadoc)
+ * @see eu.etaxonomy.cdm.api.service.IDatabaseService#setApplicationController(eu.etaxonomy.cdm.api.application.CdmApplicationController)
+ */
+ @Override
+ public void setApplicationController(CdmApplicationController cdmApplicationController){
+ this.application = cdmApplicationController;
+ }
+
+
+ /* (non-Javadoc)
+ * @see eu.etaxonomy.cdm.api.service.IDatabaseService#connectToDatasource(eu.etaxonomy.cdm.database.CdmDataSource)
+ */
+ @Override
+ public boolean connectToDatasource(CdmPersistentDataSource dataSource) throws TermNotFoundException{
+ this.application.changeDataSource(dataSource);
+ logger.debug("DataSource changed to " + dataSource.getName());
+ return true;
+ }
+
+ /* (non-Javadoc)
+ * @see eu.etaxonomy.cdm.api.service.IDatabaseService#connectToDatabase(eu.etaxonomy.cdm.database.DatabaseTypeEnum, java.lang.String, java.lang.String, java.lang.String, java.lang.String, int)
+ */
+ @Override
+ public boolean connectToDatabase(DatabaseTypeEnum databaseTypeEnum, String server,
+ String database, String username, String password, int port, String filePath, H2Mode mode, NomenclaturalCode code) throws TermNotFoundException {
+ ICdmDataSource dataSource = CdmDataSource.NewInstance(databaseTypeEnum, server, database, port, username, password, code);
+ CdmPersistentDataSource tmpDataSource = saveDataSource(TMP_DATASOURCE, dataSource);
+ boolean result = connectToDatasource(tmpDataSource);
+ CdmPersistentSourceUtils.delete(tmpDataSource);
+ return result;
+ }
+
+
+ /* (non-Javadoc)
+ * @see eu.etaxonomy.cdm.api.service.IDatabaseService#connectToDatabase(eu.etaxonomy.cdm.database.DatabaseTypeEnum, java.lang.String, java.lang.String, java.lang.String, java.lang.String)
+ */
+ @Override
+ public boolean connectToDatabase(DatabaseTypeEnum databaseTypeEnum, String server,
+ String database, String username, String password) throws TermNotFoundException {
+ return connectToDatabase(databaseTypeEnum, server, database, username, password, databaseTypeEnum.getDefaultPort(), null, null, null) ;
+ }
+
+ /* (non-Javadoc)
+ * @see eu.etaxonomy.cdm.api.service.IDatabaseService#saveDataSource(java.lang.String, eu.etaxonomy.cdm.database.ICdmDataSource)
+ */
+ @Override
+ public CdmPersistentDataSource saveDataSource(String strDataSourceName,
+ ICdmDataSource dataSource) {
+ return CdmPersistentDataSource.save(strDataSourceName, dataSource);
+ }
+
+ /* (non-Javadoc)
+ * @see eu.etaxonomy.cdm.api.service.IDatabaseService#updateDataSource(java.lang.String, eu.etaxonomy.cdm.database.CdmPersistentDataSource)
+ */
+ @Override
+ public CdmPersistentDataSource updateDataSource(String strDataSourceName,
+ CdmPersistentDataSource dataSource) throws DataSourceNotFoundException {
+ return CdmPersistentDataSource.update(strDataSourceName, dataSource);
+ }
+
+ /* (non-Javadoc)
+ * @see eu.etaxonomy.cdm.api.service.IDatabaseService#getUrl()
+ */
+ @Override
+ public String getUrl() {
+ return getDataSource().getUrl();
+ }
+
+ /* (non-Javadoc)
+ * @see eu.etaxonomy.cdm.api.service.IDatabaseService#getUsername()
+ */
+ @Override
+ public String getUsername() {
+ return getDataSource().getUsername();
+ }
+
+ /**
+ * Returns the AbstractDriverBasedDataSource from hibernate,
+ * generalized in order to also allow using SimpleDriverDataSource.
+ *
+ * @return the AbstractDriverBasedDataSource from the hibernate layer
+ */
+ private AbstractDriverBasedDataSource getDataSource(){
+ AbstractDriverBasedDataSource ds = (AbstractDriverBasedDataSource)SessionFactoryUtils.getDataSource(factory);
+ return ds;
+ }
+
+
+ /* (non-Javadoc)
+ * @see org.springframework.context.ApplicationContextAware#setApplicationContext(org.springframework.context.ApplicationContext)
+ */
+ @Override
+ public void setApplicationContext(ApplicationContext applicationContext)
+ throws BeansException {
+ this.appContext = applicationContext;
+ }
+
+
+
+ /* (non-Javadoc)
+ * @see eu.etaxonomy.cdm.api.service.IDatabaseService#getDbSchemaVersion()
+ */
+ @Override
+ public String getDbSchemaVersion() throws CdmSourceException {
+ try {
+ return (String)getSingleValue(MetaDataPropertyName.DB_SCHEMA_VERSION.getSqlQuery());
+ } catch (SQLException e) {
+ throw new CdmSourceException(e.getMessage());
+ }
+ }
+
+
+ /* (non-Javadoc)
+ * @see eu.etaxonomy.cdm.api.service.IDatabaseService#isDbEmpty()
+ */
+ @Override
+ public boolean isDbEmpty() throws CdmSourceException {
+ // Any CDM DB should have a schema version
+ String dbSchemaVersion = getDbSchemaVersion();
+ return (dbSchemaVersion == null || dbSchemaVersion.equals(""));
+ }
+
+ /**
+ * Execute a SQL query which returns a single value
+ *
+ * @param query the SQL query, which is expected to return a single value
+ * @return the single value returned by the query, or null if the query returned no record
+ * @throws SQLException
+ */
+ private Object getSingleValue(String query) throws SQLException {
+ String queryString = query == null? "(null)": query;
+ //ResultSet resultSet = executeQuery(query);
+ ResultSet resultSet = null;
+
+ Connection connection = SessionFactoryUtils.getDataSource(factory).getConnection();
+ if (connection != null){
+
+ Statement statement = connection.createStatement();
+ resultSet = statement.executeQuery(query);
+
+ if (resultSet == null || resultSet.next() == false){
+ logger.info("No record returned for query " + queryString);
+ return null;
+ }
+ if (resultSet.getMetaData().getColumnCount() != 1){
+ logger.info("More than one column selected in query" + queryString);
+ //first value will be taken
+ }
+ Object object = resultSet.getObject(1);
+ if (resultSet.next()){
+ logger.info("Multiple results for query " + queryString);
+ //first row will be taken
+ }
+ // making sure we close all resources so we don't run out of
+ // connections in the connection pool
+ resultSet.close();
+ statement.close();
+ connection.close();
+
+ return object;
+ }else{
+ throw new RuntimeException("Could not establish connection to database");
+ }
+
+ }
+
+
+ @Override
+ public Map<MetaDataPropertyName, String> getCdmMetadataMap() throws CdmSourceException {
+ Map<MetaDataPropertyName, String> cdmMetaDataMap = new HashMap<MetaDataPropertyName, String>();
+
+ for(MetaDataPropertyName mdpn : MetaDataPropertyName.values()){
+ String value = null;
+ try {
+ value = (String)getSingleValue(mdpn.getSqlQuery());
+ } catch (SQLException e) {
+ throw new CdmSourceException(e.getMessage());
+ }
+ if(value != null) {
+ cdmMetaDataMap.put(mdpn, value);
+ }
+ }
+ return cdmMetaDataMap;
+ }
+
+}
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.search.Search;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.orm.hibernate4.HibernateTransactionManager;
+import org.springframework.orm.hibernate5.HibernateTransactionManager;
import org.springframework.stereotype.Service;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionStatus;
logger.trace("Isolation level = " + txDef.getIsolationLevel());
logger.trace("Timeout = " + txDef.getTimeout());
logger.trace("Read Only = " + txDef.isReadOnly());
- // org.springframework.orm.hibernate4.HibernateTransactionManager
+ // org.springframework.orm.hibernate5.HibernateTransactionManager
// provides more transaction/session-related debug information.
}
import org.hibernate.search.indexes.spi.DirectoryBasedIndexManager;
import org.hibernate.search.indexes.spi.IndexManager;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.orm.hibernate4.HibernateTransactionManager;
+import org.springframework.orm.hibernate5.HibernateTransactionManager;
import org.springframework.stereotype.Component;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.Transactional;
import eu.etaxonomy.cdm.common.monitor.NullProgressMonitor;
import eu.etaxonomy.cdm.common.monitor.RestServiceProgressMonitor;
import eu.etaxonomy.cdm.common.monitor.SubProgressMonitor;
-import eu.etaxonomy.cdm.config.Configuration;
import eu.etaxonomy.cdm.model.common.CdmBase;
import eu.etaxonomy.cdm.model.description.DescriptionElementBase;
import eu.etaxonomy.cdm.model.name.NonViralName;
String indexedField = itr.next();
logger.info("creating dictionary for field " + indexedField);
Dictionary dictionary = new LuceneDictionary(indexReader, indexedField);
- IndexWriterConfig iwc = new IndexWriterConfig(Configuration.luceneVersion, searchFactory.getAnalyzer(type));
+ IndexWriterConfig iwc = new IndexWriterConfig(searchFactory.getAnalyzer(type));
spellChecker.indexDictionary(dictionary, iwc, true);
}
subMonitor.internalWorked(1);
*/
package eu.etaxonomy.cdm.api.service.search;
+import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
readers.add(reader);
}
if(readers.size() > 1){
- MultiReader multireader = new MultiReader(readers.toArray(new IndexReader[readers.size()]), true);
+ IndexReader[] readersArray = readers.toArray(new IndexReader[readers.size()]);
+ MultiReader multireader;
+ try {
+ multireader = new MultiReader(readersArray, true);
+ } catch (IOException e) {
+ //or do we want to force clients to handle the IOException themselves?
+ throw new RuntimeException(e);
+ }
searcher = new IndexSearcher(multireader);
} else {
searcher = new IndexSearcher(readers.get(0));
-// $Id$\r
-/**\r
- * Copyright (C) 2015 EDIT\r
- * European Distributed Institute of Taxonomy\r
- * http://www.e-taxonomy.eu\r
- *\r
- * The contents of this file are subject to the Mozilla Public License Version 1.1\r
- * See LICENSE.TXT at the top of this package for the full license terms.\r
- */\r
-package eu.etaxonomy.cdm.api.validation.batch;\r
-\r
-import java.util.List;\r
-import java.util.Set;\r
-\r
-import javax.validation.ConstraintViolation;\r
-import javax.validation.Validation;\r
-import javax.validation.Validator;\r
-import javax.validation.ValidatorFactory;\r
-\r
-import org.apache.log4j.Logger;\r
-import org.hibernate.validator.HibernateValidator;\r
-import org.hibernate.validator.HibernateValidatorConfiguration;\r
-import org.springframework.context.ApplicationContext;\r
-import org.springframework.context.ApplicationContextAware;\r
-import org.springframework.orm.hibernate4.HibernateTransactionManager;\r
-import org.springframework.stereotype.Component;\r
-import org.springframework.transaction.PlatformTransactionManager;\r
-import org.springframework.transaction.TransactionDefinition;\r
-import org.springframework.transaction.TransactionStatus;\r
-import org.springframework.transaction.support.DefaultTransactionDefinition;\r
-\r
-import eu.etaxonomy.cdm.api.application.ICdmApplicationConfiguration;\r
-import eu.etaxonomy.cdm.api.service.ICommonService;\r
-import eu.etaxonomy.cdm.api.service.IEntityValidationService;\r
-import eu.etaxonomy.cdm.api.service.IService;\r
-import eu.etaxonomy.cdm.model.common.CdmBase;\r
-import eu.etaxonomy.cdm.model.common.ICdmBase;\r
-import eu.etaxonomy.cdm.model.validation.CRUDEventType;\r
-import eu.etaxonomy.cdm.persistence.dao.jdbc.validation.EntityValidationCrudJdbcImpl;\r
-import eu.etaxonomy.cdm.validation.Level2;\r
-import eu.etaxonomy.cdm.validation.Level3;\r
-\r
-/**\r
- * @author ayco_holleman\r
- * @author a.mueller\r
- * @date 27 jan. 2015\r
- *\r
- */\r
-@Component("batchValidator")\r
-public class BatchValidator implements Runnable, ApplicationContextAware {\r
-\r
- static final Class<?>[] DEFAULT_VALIDATION_GROUPS = new Class<?>[] { Level2.class, Level3.class };\r
-\r
- private static final Logger logger = Logger.getLogger(BatchValidator.class);\r
-\r
-\r
- private ICdmApplicationConfiguration repository;\r
-\r
- private ApplicationContext appContext;\r
-\r
- private Validator validator;\r
- private Class<?>[] validationGroups;\r
-\r
-\r
- @Override\r
- public void setApplicationContext(ApplicationContext appContext) {\r
- this.appContext = appContext;\r
- }\r
-\r
- @Override\r
- public void run() {\r
- Thread.currentThread().setPriority(1);\r
- initValidator();\r
- validate();\r
- }\r
-\r
- /**\r
- *\r
- */\r
- private void initValidator() {\r
- if (getValidator() == null){\r
- HibernateValidatorConfiguration config = Validation.byProvider(HibernateValidator.class).configure();\r
- ValidatorFactory factory = config.buildValidatorFactory();\r
- setValidator(factory.getValidator());\r
- }\r
- if (validationGroups == null) {\r
- validationGroups = DEFAULT_VALIDATION_GROUPS;\r
- }\r
- }\r
-\r
-\r
-\r
- private <T extends ICdmBase, S extends T> void validate() {\r
- logger.info("Starting batch validation");\r
-\r
- // Get service for saving errors to database\r
-// IEntityValidationService validationResultService = context.getEntityValidationService();\r
- IEntityValidationService entityValidationService = appContext.getBean(IEntityValidationService.class);\r
-\r
- EntityValidationCrudJdbcImpl jdbcPersister = appContext.getBean(EntityValidationCrudJdbcImpl.class);\r
-\r
- // Get all services dealing with "real" entities\r
- List<Class<CdmBase>> classesToValidate = BatchValidationUtil.getClassesToValidate();\r
-\r
- for (Class<CdmBase> entityClass : classesToValidate) {\r
- //TODO currently this seems to work only on the exact class, we may move it down\r
- //to single entity validation again but cache the information for each class\r
- if (true || BatchValidationUtil.isConstrainedEntityClass(validator, entityClass)){\r
-\r
- // ICommonService commonService = repository.getCommonService();\r
- ICommonService commonService = appContext.getBean(ICommonService.class);\r
- logger.info("Loading entities of type " + entityClass.getName());\r
- //false for saving validation results\r
- //TODO can we handle results in a different transaction?\r
- boolean readOnly = false;\r
- TransactionStatus txStatus = startTransaction(readOnly);\r
- handleSingleClass(commonService, entityClass, entityValidationService, jdbcPersister);\r
- commitTransaction(txStatus);\r
- }\r
- }\r
-\r
- logger.info("Batch validation complete");\r
- }\r
-\r
- /**\r
- * @param txStatus\r
- */\r
- private void commitTransaction(TransactionStatus txStatus) {\r
- PlatformTransactionManager txManager = getTransactionManager();\r
- txManager.commit(txStatus);\r
-\r
- }\r
-\r
- /**\r
- * @param readOnly\r
- * @return\r
- *\r
- */\r
- private TransactionStatus startTransaction(boolean readOnly) {\r
- PlatformTransactionManager txManager = getTransactionManager();\r
-\r
- DefaultTransactionDefinition defaultTxDef = new DefaultTransactionDefinition();\r
- defaultTxDef.setReadOnly(readOnly);\r
- TransactionDefinition txDef = defaultTxDef;\r
- TransactionStatus txStatus = txManager.getTransaction(txDef);\r
- return txStatus;\r
- }\r
-\r
- /**\r
- * @return\r
- */\r
- private PlatformTransactionManager getTransactionManager() {\r
- PlatformTransactionManager txManager = appContext.getBean(HibernateTransactionManager.class);\r
- return txManager;\r
- }\r
-\r
- private void handleSingleClass(ICommonService commonService, Class<CdmBase> entityClass, IEntityValidationService entityValidationService, EntityValidationCrudJdbcImpl jdbcPersister) {\r
- int n = commonService.count(entityClass);\r
- int pageSize = 1000;\r
- for (int page = 0; page < n ; page = page + pageSize ){\r
- handlePage(commonService, entityClass, entityValidationService, jdbcPersister,\r
- page/pageSize, pageSize);\r
- }\r
- }\r
-\r
-\r
- /**\r
- * @param commonService\r
- * @param entityClass\r
- * @param entityValidationService\r
- * @param jdbcPersister\r
- *\r
- */\r
- private void handlePage(ICommonService commonService, Class<CdmBase> entityClass, IEntityValidationService entityValidationService, EntityValidationCrudJdbcImpl jdbcPersister, int start, int pageSize) {\r
-\r
- List<CdmBase> entities;\r
-\r
- try {\r
-// commonService.count()\r
- entities = commonService.list(entityClass, pageSize, 0, null, null);\r
- } catch (Throwable t) {\r
- //TODO handle exception\r
- logger.error("Failed to load entities", t);\r
- return;\r
- }\r
- for (CdmBase entity : entities) {\r
- try {\r
- Set<ConstraintViolation<CdmBase>> errors = getValidator().validate(entity, getValidationGroups());\r
- if (errors.size() != 0) {\r
- if (logger.isInfoEnabled()){logger.info(errors.size() + " constraint violation(s) detected in entity " + entity.toString());}\r
-// entityValidationService.saveEntityValidation(entity, errors, CRUDEventType.NONE,\r
-// getValidationGroups());\r
-\r
- jdbcPersister.saveEntityValidation(entity, errors, CRUDEventType.NONE, getValidationGroups());\r
- }\r
- } catch (Exception e) {\r
- // TODO Exception handling\r
- e.printStackTrace();\r
- }\r
- }\r
-\r
- }\r
-\r
- private <T extends ICdmBase, S extends T> void validate_old() {\r
- logger.info("Starting batch validation");\r
-\r
- if (validationGroups == null) {\r
- validationGroups = DEFAULT_VALIDATION_GROUPS;\r
- }\r
-\r
- // Get service for saving errors to database\r
- IEntityValidationService validationResultService = repository.getEntityValidationService();\r
-\r
- // Get all services dealing with "real" entities\r
- List<EntityValidationUnit<T, S>> validationUnits = BatchValidationUtil.getAvailableServices(repository);\r
-\r
- for (EntityValidationUnit<T, S> unit : validationUnits) {\r
- Class<S> entityClass = unit.getEntityClass();\r
- IService<T> entityLoader = unit.getEntityLoader();\r
- logger.info("Loading entities of type " + entityClass.getName());\r
- List<S> entities;\r
- try {\r
- entities = entityLoader.list(entityClass, 0, 0, null, null);\r
- } catch (Throwable t) {\r
- logger.error("Failed to load entities", t);\r
- continue;\r
- }\r
- for (S entity : entities) {\r
- if (BatchValidationUtil.isConstrainedEntityClass(getValidator(), entity.getClass())) {\r
- Set<ConstraintViolation<S>> errors = getValidator().validate(entity, validationGroups);\r
- if (errors.size() != 0) {\r
- logger.warn(errors.size() + " error(s) detected in entity " + entity.toString());\r
- validationResultService.saveEntityValidation(entity, errors, CRUDEventType.NONE,\r
- validationGroups);\r
- }\r
- }\r
- }\r
- }\r
-\r
- logger.info("Batch validation complete");\r
- }\r
-\r
- /**\r
- * Get the application context that will provide the services that will, on\r
- * their turn, provide the entities to be validated.\r
- *\r
- * @return The application context\r
- */\r
- public ICdmApplicationConfiguration getAppController() {\r
- return repository;\r
- }\r
-\r
- /**\r
- * Set the application context.\r
- *\r
- * @param context\r
- * The application context\r
- */\r
- public void setAppController(ICdmApplicationConfiguration context) {\r
- this.repository = context;\r
- }\r
-\r
- /**\r
- * Get the {@code Validator} instance that will carry out the validations.\r
- *\r
- * @return The {@code Validator}\r
- */\r
- public Validator getValidator() {\r
- return validator;\r
- }\r
-\r
- /**\r
- * Set the {@code Validator} instance that will carry out the validations.\r
- *\r
- * @param validator\r
- * The {@code Validator}\r
- */\r
- public void setValidator(Validator validator) {\r
- this.validator = validator;\r
- }\r
-\r
- /**\r
- * Get the validation groups to be applied by the {@code Validator}.\r
- *\r
- * @return The validation groups\r
- */\r
- public Class<?>[] getValidationGroups() {\r
- return validationGroups;\r
- }\r
-\r
- /**\r
- * Set the validation groups to be applied by the {@code Validator}. By\r
- * default all Level2 and Level3 will be checked. So if that is what you\r
- * want, you do not need to call this method before calling {@link #run()}.\r
- *\r
- * @param validationGroups\r
- * The validation groups\r
- */\r
- public void setValidationGroups(Class<?>... validationGroups) {\r
- this.validationGroups = validationGroups;\r
- }\r
-\r
-}\r
+// $Id$
+/**
+ * Copyright (C) 2015 EDIT
+ * European Distributed Institute of Taxonomy
+ * http://www.e-taxonomy.eu
+ *
+ * The contents of this file are subject to the Mozilla Public License Version 1.1
+ * See LICENSE.TXT at the top of this package for the full license terms.
+ */
+package eu.etaxonomy.cdm.api.validation.batch;
+
+import java.util.List;
+import java.util.Set;
+
+import javax.validation.ConstraintViolation;
+import javax.validation.Validation;
+import javax.validation.Validator;
+import javax.validation.ValidatorFactory;
+
+import org.apache.log4j.Logger;
+import org.hibernate.validator.HibernateValidator;
+import org.hibernate.validator.HibernateValidatorConfiguration;
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.ApplicationContextAware;
+import org.springframework.orm.hibernate5.HibernateTransactionManager;
+import org.springframework.stereotype.Component;
+import org.springframework.transaction.PlatformTransactionManager;
+import org.springframework.transaction.TransactionDefinition;
+import org.springframework.transaction.TransactionStatus;
+import org.springframework.transaction.support.DefaultTransactionDefinition;
+
+import eu.etaxonomy.cdm.api.application.ICdmApplicationConfiguration;
+import eu.etaxonomy.cdm.api.service.ICommonService;
+import eu.etaxonomy.cdm.api.service.IEntityValidationService;
+import eu.etaxonomy.cdm.api.service.IService;
+import eu.etaxonomy.cdm.model.common.CdmBase;
+import eu.etaxonomy.cdm.model.common.ICdmBase;
+import eu.etaxonomy.cdm.model.validation.CRUDEventType;
+import eu.etaxonomy.cdm.persistence.dao.jdbc.validation.EntityValidationCrudJdbcImpl;
+import eu.etaxonomy.cdm.validation.Level2;
+import eu.etaxonomy.cdm.validation.Level3;
+
+/**
+ * @author ayco_holleman
+ * @author a.mueller
+ * @date 27 jan. 2015
+ *
+ */
+@Component("batchValidator")
+public class BatchValidator implements Runnable, ApplicationContextAware {
+
+ static final Class<?>[] DEFAULT_VALIDATION_GROUPS = new Class<?>[] { Level2.class, Level3.class };
+
+ private static final Logger logger = Logger.getLogger(BatchValidator.class);
+
+
+ private ICdmApplicationConfiguration repository;
+
+ private ApplicationContext appContext;
+
+ private Validator validator;
+ private Class<?>[] validationGroups;
+
+
+    /** Stores the Spring context; used later to look up service beans and the transaction manager. */
+    @Override
+    public void setApplicationContext(ApplicationContext appContext) {
+        this.appContext = appContext;
+    }
+
+    /**
+     * Entry point of the batch validation thread. Lowers the thread priority so
+     * validation does not compete with interactive work, lazily initializes the
+     * validator, then validates all constrained entity classes.
+     */
+    @Override
+    public void run() {
+        // Thread.MIN_PRIORITY == 1; use the named constant instead of the magic number
+        Thread.currentThread().setPriority(Thread.MIN_PRIORITY);
+        initValidator();
+        validate();
+    }
+
+    /**
+     * Lazily creates the Hibernate {@code Validator} and, when no validation
+     * groups were set via {@link #setValidationGroups(Class...)}, falls back to
+     * the defaults (Level2 and Level3).
+     */
+    private void initValidator() {
+        if (getValidator() == null){
+            HibernateValidatorConfiguration config = Validation.byProvider(HibernateValidator.class).configure();
+            ValidatorFactory factory = config.buildValidatorFactory();
+            setValidator(factory.getValidator());
+        }
+        if (validationGroups == null) {
+            validationGroups = DEFAULT_VALIDATION_GROUPS;
+        }
+    }
+
+
+
+    /**
+     * Validates all instances of all entity classes reported by
+     * {@code BatchValidationUtil.getClassesToValidate()}, one class per
+     * transaction, persisting violations via the JDBC persister.
+     */
+    private <T extends ICdmBase, S extends T> void validate() {
+        logger.info("Starting batch validation");
+
+        // Get service for saving errors to database
+//        IEntityValidationService validationResultService = context.getEntityValidationService();
+        IEntityValidationService entityValidationService = appContext.getBean(IEntityValidationService.class);
+
+        EntityValidationCrudJdbcImpl jdbcPersister = appContext.getBean(EntityValidationCrudJdbcImpl.class);
+
+        // Get all services dealing with "real" entities
+        List<Class<CdmBase>> classesToValidate = BatchValidationUtil.getClassesToValidate();
+
+        for (Class<CdmBase> entityClass : classesToValidate) {
+            //TODO currently this seems to work only on the exact class, we may move it down
+            //to single entity validation again but cache the information for each class
+            // NOTE(review): "true ||" deliberately disables the constrained-class
+            // check until the TODO above is resolved — every class is validated
+            if (true || BatchValidationUtil.isConstrainedEntityClass(validator, entityClass)){
+
+                // ICommonService commonService = repository.getCommonService();
+                ICommonService commonService = appContext.getBean(ICommonService.class);
+                logger.info("Loading entities of type " + entityClass.getName());
+                //false for saving validation results
+                //TODO can we handle results in a different transaction?
+                boolean readOnly = false;
+                TransactionStatus txStatus = startTransaction(readOnly);
+                handleSingleClass(commonService, entityClass, entityValidationService, jdbcPersister);
+                commitTransaction(txStatus);
+            }
+        }
+
+        logger.info("Batch validation complete");
+    }
+
+    /**
+     * Commits the given transaction using the context's transaction manager.
+     *
+     * @param txStatus the status handle of the transaction to commit
+     */
+    private void commitTransaction(TransactionStatus txStatus) {
+        PlatformTransactionManager txManager = getTransactionManager();
+        txManager.commit(txStatus);
+
+    }
+
+    /**
+     * Starts a new transaction via the application context's transaction manager.
+     *
+     * @param readOnly whether the new transaction should be read-only
+     * @return the status handle of the newly started transaction
+     */
+    private TransactionStatus startTransaction(boolean readOnly) {
+        PlatformTransactionManager txManager = getTransactionManager();
+
+        // DefaultTransactionDefinition already is a TransactionDefinition;
+        // the intermediate alias variable was redundant
+        DefaultTransactionDefinition txDef = new DefaultTransactionDefinition();
+        txDef.setReadOnly(readOnly);
+        return txManager.getTransaction(txDef);
+    }
+
+    /**
+     * Looks up the {@code HibernateTransactionManager} bean from the
+     * application context.
+     *
+     * @return the platform transaction manager
+     */
+    private PlatformTransactionManager getTransactionManager() {
+        PlatformTransactionManager txManager = appContext.getBean(HibernateTransactionManager.class);
+        return txManager;
+    }
+
+    /**
+     * Validates all instances of one entity class in pages of 1000. The loop
+     * variable advances by {@code pageSize}, so {@code page/pageSize} yields the
+     * zero-based page number passed on to {@code handlePage}.
+     */
+    private void handleSingleClass(ICommonService commonService, Class<CdmBase> entityClass, IEntityValidationService entityValidationService, EntityValidationCrudJdbcImpl jdbcPersister) {
+        int n = commonService.count(entityClass);
+        int pageSize = 1000;
+        for (int page = 0; page < n ; page = page + pageSize ){
+            handlePage(commonService, entityClass, entityValidationService, jdbcPersister,
+                    page/pageSize, pageSize);
+        }
+    }
+
+
+    /**
+     * Validates one page of entities of the given class and persists any
+     * constraint violations via the JDBC persister.
+     *
+     * @param commonService service used to load the entities
+     * @param entityClass the entity class whose instances are validated
+     * @param entityValidationService currently unused; kept for a possible
+     *            service-based persistence path
+     * @param jdbcPersister persister used to store validation results
+     * @param start the zero-based page number to load
+     * @param pageSize the number of entities per page
+     */
+    private void handlePage(ICommonService commonService, Class<CdmBase> entityClass, IEntityValidationService entityValidationService, EntityValidationCrudJdbcImpl jdbcPersister, int start, int pageSize) {
+
+        List<CdmBase> entities;
+
+        try {
+            // BUG FIX: the start offset was hard-coded to 0, so every call
+            // re-loaded and re-validated the FIRST page only; convert the page
+            // number into the record offset expected by list()
+            entities = commonService.list(entityClass, pageSize, start * pageSize, null, null);
+        } catch (Throwable t) {
+            //TODO handle exception
+            logger.error("Failed to load entities", t);
+            return;
+        }
+        for (CdmBase entity : entities) {
+            try {
+                Set<ConstraintViolation<CdmBase>> errors = getValidator().validate(entity, getValidationGroups());
+                if (errors.size() != 0) {
+                    if (logger.isInfoEnabled()){logger.info(errors.size() + " constraint violation(s) detected in entity " + entity.toString());}
+                    jdbcPersister.saveEntityValidation(entity, errors, CRUDEventType.NONE, getValidationGroups());
+                }
+            } catch (Exception e) {
+                // TODO better exception handling; log instead of printing to stderr
+                logger.error("Validation failed for entity " + entity, e);
+            }
+        }
+
+    }
+
+    /**
+     * Legacy implementation of the validation loop, apparently superseded by
+     * {@link #validate()}; no caller within this class. Kept for reference —
+     * NOTE(review): consider deleting once the new path is confirmed.
+     */
+    private <T extends ICdmBase, S extends T> void validate_old() {
+        logger.info("Starting batch validation");
+
+        if (validationGroups == null) {
+            validationGroups = DEFAULT_VALIDATION_GROUPS;
+        }
+
+        // Get service for saving errors to database
+        IEntityValidationService validationResultService = repository.getEntityValidationService();
+
+        // Get all services dealing with "real" entities
+        List<EntityValidationUnit<T, S>> validationUnits = BatchValidationUtil.getAvailableServices(repository);
+
+        for (EntityValidationUnit<T, S> unit : validationUnits) {
+            Class<S> entityClass = unit.getEntityClass();
+            IService<T> entityLoader = unit.getEntityLoader();
+            logger.info("Loading entities of type " + entityClass.getName());
+            List<S> entities;
+            try {
+                entities = entityLoader.list(entityClass, 0, 0, null, null);
+            } catch (Throwable t) {
+                logger.error("Failed to load entities", t);
+                continue;
+            }
+            for (S entity : entities) {
+                if (BatchValidationUtil.isConstrainedEntityClass(getValidator(), entity.getClass())) {
+                    Set<ConstraintViolation<S>> errors = getValidator().validate(entity, validationGroups);
+                    if (errors.size() != 0) {
+                        logger.warn(errors.size() + " error(s) detected in entity " + entity.toString());
+                        validationResultService.saveEntityValidation(entity, errors, CRUDEventType.NONE,
+                                validationGroups);
+                    }
+                }
+            }
+        }
+
+        logger.info("Batch validation complete");
+    }
+
+    /**
+     * Get the application configuration that provides the services that will,
+     * in turn, provide the entities to be validated.
+     *
+     * @return The application configuration
+     */
+    public ICdmApplicationConfiguration getAppController() {
+        return repository;
+    }
+
+    /**
+     * Set the application configuration.
+     *
+     * @param context
+     *            The application configuration
+     */
+    public void setAppController(ICdmApplicationConfiguration context) {
+        this.repository = context;
+    }
+
+    /**
+     * Get the {@code Validator} instance that will carry out the validations.
+     *
+     * @return The {@code Validator}, or {@code null} before initialization
+     */
+    public Validator getValidator() {
+        return validator;
+    }
+
+    /**
+     * Set the {@code Validator} instance that will carry out the validations.
+     *
+     * @param validator
+     *            The {@code Validator}
+     */
+    public void setValidator(Validator validator) {
+        this.validator = validator;
+    }
+
+    /**
+     * Get the validation groups to be applied by the {@code Validator}.
+     *
+     * @return The validation groups, or {@code null} before initialization
+     */
+    public Class<?>[] getValidationGroups() {
+        return validationGroups;
+    }
+
+    /**
+     * Set the validation groups to be applied by the {@code Validator}. By
+     * default all Level2 and Level3 constraints will be checked. So if that is
+     * what you want, you do not need to call this method before calling
+     * {@link #run()}.
+     *
+     * @param validationGroups
+     *            The validation groups
+     */
+    public void setValidationGroups(Class<?>... validationGroups) {
+        this.validationGroups = validationGroups;
+    }
+
+}
-\r
-package eu.etaxonomy.cdm.api.service;\r
-\r
-import static org.junit.Assert.assertEquals;\r
-import static org.junit.Assert.assertFalse;\r
-import static org.junit.Assert.assertTrue;\r
-\r
-import java.io.FileNotFoundException;\r
-import java.util.Iterator;\r
-import java.util.Set;\r
-import java.util.UUID;\r
-\r
-import javax.sql.DataSource;\r
-\r
-import org.junit.Assert;\r
-import org.apache.log4j.Logger;\r
-import org.junit.Before;\r
-import org.junit.Ignore;\r
-import org.junit.Test;\r
-import org.junit.runner.RunWith;\r
-import org.springframework.security.authentication.AuthenticationManager;\r
-import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;\r
-import org.springframework.security.core.Authentication;\r
-import org.springframework.security.core.context.SecurityContext;\r
-import org.springframework.security.core.context.SecurityContextHolder;\r
-import org.unitils.UnitilsJUnit4TestClassRunner;\r
-import org.unitils.database.annotations.TestDataSource;\r
-import org.unitils.database.annotations.Transactional;\r
-import org.unitils.database.util.TransactionMode;\r
-import org.unitils.dbunit.annotation.DataSet;\r
-import org.unitils.spring.annotation.SpringApplicationContext;\r
-import org.unitils.spring.annotation.SpringBeanByName;\r
-\r
-import eu.etaxonomy.cdm.api.service.UpdateResult.Status;\r
-import eu.etaxonomy.cdm.api.service.exception.DataChangeNoRollbackException;\r
-import eu.etaxonomy.cdm.model.description.Feature;\r
-import eu.etaxonomy.cdm.model.description.TaxonDescription;\r
-import eu.etaxonomy.cdm.model.description.TextData;\r
-import eu.etaxonomy.cdm.model.media.Media;\r
-import eu.etaxonomy.cdm.model.taxon.Taxon;\r
-import eu.etaxonomy.cdm.model.taxon.TaxonNode;\r
-import eu.etaxonomy.cdm.persistence.hibernate.permission.CdmPermissionEvaluator;\r
-import eu.etaxonomy.cdm.test.integration.CdmTransactionalIntegrationTestWithSecurity;\r
-\r
-\r
-\r
-/**\r
- * Test class only for development purposes, must be run in suite.\r
- *\r
- */\r
-//@RunWith(UnitilsJUnit4TestClassRunner.class)\r
-//@SpringApplicationContext({"/eu/etaxonomy/cdm/applicationContextSecurity.xml"})\r
-//@Transactional\r
-@Ignore // should be ignored\r
-@DataSet("SecurityTest.xml")\r
-public class SecurityWithTransaction extends CdmTransactionalIntegrationTestWithSecurity {\r
-\r
- private static final Logger logger = Logger.getLogger(SecurityWithTransaction.class);\r
-\r
- @SpringBeanByName\r
- private ITaxonService taxonService;\r
-\r
- @SpringBeanByName\r
- private IDescriptionService descriptionService;\r
-\r
- @SpringBeanByName\r
- private ITaxonNodeService taxonNodeService;\r
-\r
- @SpringBeanByName\r
- private IUserService userService;\r
-\r
-\r
- @TestDataSource\r
- protected DataSource dataSource;\r
-\r
- private Authentication authentication;\r
-\r
- @SpringBeanByName\r
- private AuthenticationManager authenticationManager;\r
-\r
- @SpringBeanByName\r
- private CdmPermissionEvaluator permissionEvaluator;\r
-\r
- private UsernamePasswordAuthenticationToken token;\r
-\r
-\r
- @Before\r
- public void setUp(){\r
- token = new UsernamePasswordAuthenticationToken("admin", "sPePhAz6");\r
- }\r
-\r
- @Test\r
- public void testDeleteTaxon(){\r
- token = new UsernamePasswordAuthenticationToken("taxonomist", "test4");\r
- authentication = authenticationManager.authenticate(token);\r
- SecurityContext context = SecurityContextHolder.getContext();\r
- context.setAuthentication(authentication);\r
- Taxon actualTaxon = (Taxon)taxonService.find(UUID.fromString("7b8b5cb3-37ba-4dba-91ac-4c6ffd6ac331"));\r
-\r
- //try {\r
- DeleteResult result = taxonService.deleteTaxon(actualTaxon.getUuid(), null, null);\r
- /*} catch (DataChangeNoRollbackException e) {\r
- Assert.fail();\r
- }*/\r
- if (!result.isOk()){\r
- Assert.fail();\r
- }\r
- }\r
-\r
-\r
- @Test\r
- public void testSaveOrUpdateDescription(){\r
-\r
- authentication = authenticationManager.authenticate(new UsernamePasswordAuthenticationToken("descriptionEditor", "test"));\r
- SecurityContext context = SecurityContextHolder.getContext();\r
- context.setAuthentication(authentication);\r
- /*Taxon taxon = (Taxon) taxonService.load(UUID.fromString("7b8b5cb3-37ba-4dba-91ac-4c6ffd6ac331"));\r
-\r
- Set<TaxonDescription> descriptions = taxon.getDescriptions();\r
-\r
- Iterator<TaxonDescription> iterator = descriptions.iterator();\r
-\r
- TaxonDescription description = iterator.next();*/\r
- TaxonDescription description = (TaxonDescription) descriptionService.find(UUID.fromString("eb17b80a-9be6-4642-a6a8-b19a318925e6"));\r
-\r
- TextData textData = new TextData();\r
- textData.setFeature(Feature.ECOLOGY());\r
- Media media = Media.NewInstance();\r
- textData.addMedia(media);\r
-\r
-\r
-\r
- //descriptionService.saveDescriptionElement(textData);\r
- description.addElement(textData);\r
-\r
- descriptionService.saveOrUpdate(description);\r
-\r
- Taxon taxon = (Taxon) taxonService.find(UUID.fromString("7b8b5cb3-37ba-4dba-91ac-4c6ffd6ac331"));\r
- Set<TaxonDescription> descriptions = taxon.getDescriptions();\r
-\r
- Iterator<TaxonDescription> iterator = descriptions.iterator();\r
-\r
- description = iterator.next();\r
- assertEquals(1, descriptions.size());\r
- assertEquals(2,description.getElements().size());\r
-\r
-\r
-\r
- }\r
-\r
- @Test\r
- public void testAllowOnlyAccessToPartOfTree(){\r
- authentication = authenticationManager.authenticate(new UsernamePasswordAuthenticationToken("partEditor", "test4"));\r
- SecurityContext context = SecurityContextHolder.getContext();\r
- context.setAuthentication(authentication);\r
-\r
- Taxon tribe = (Taxon)taxonService.find(UUID.fromString("928a0167-98cd-4555-bf72-52116d067625"));\r
- Taxon taxon = (Taxon)taxonService.find(UUID.fromString("bc09aca6-06fd-4905-b1e7-cbf7cc65d783"));\r
- Iterator<TaxonNode> it = tribe.getTaxonNodes().iterator();\r
- TaxonNode node = it.next();\r
-\r
- assertFalse(permissionEvaluator.hasPermission(authentication, node, "UPDATE"));\r
- node = node.getChildNodes().iterator().next();\r
-\r
- System.err.println(node.getUuid());\r
-\r
- assertTrue(permissionEvaluator.hasPermission(authentication, node, "UPDATE"));\r
- node = node.getChildNodes().iterator().next();\r
- assertTrue(permissionEvaluator.hasPermission(authentication, node, "UPDATE"));\r
- TaxonDescription description = TaxonDescription.NewInstance(taxon);\r
-\r
- taxonNodeService.saveOrUpdate(node);\r
- assertFalse(permissionEvaluator.hasPermission(authentication, description, "UPDATE"));\r
-\r
-\r
- }\r
-\r
- /* (non-Javadoc)\r
- * @see eu.etaxonomy.cdm.test.integration.CdmIntegrationTest#createTestData()\r
- */\r
- @Override\r
- public void createTestDataSet() throws FileNotFoundException {\r
- // TODO Auto-generated method stub\r
- \r
- }\r
-\r
-\r
-}\r
+
+package eu.etaxonomy.cdm.api.service;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.io.FileNotFoundException;
+import java.util.Iterator;
+import java.util.Set;
+import java.util.UUID;
+
+import javax.sql.DataSource;
+
+import org.apache.log4j.Logger;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.springframework.security.authentication.AuthenticationManager;
+import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
+import org.springframework.security.core.Authentication;
+import org.springframework.security.core.context.SecurityContext;
+import org.springframework.security.core.context.SecurityContextHolder;
+import org.unitils.database.annotations.TestDataSource;
+import org.unitils.dbunit.annotation.DataSet;
+import org.unitils.spring.annotation.SpringBeanByName;
+
+import eu.etaxonomy.cdm.model.description.Feature;
+import eu.etaxonomy.cdm.model.description.TaxonDescription;
+import eu.etaxonomy.cdm.model.description.TextData;
+import eu.etaxonomy.cdm.model.media.Media;
+import eu.etaxonomy.cdm.model.taxon.Taxon;
+import eu.etaxonomy.cdm.model.taxon.TaxonNode;
+import eu.etaxonomy.cdm.persistence.hibernate.permission.CdmPermissionEvaluator;
+import eu.etaxonomy.cdm.test.integration.CdmTransactionalIntegrationTestWithSecurity;
+
+
+
+/**
+ * Test class only for development purposes, must be run in suite.
+ *
+ */
+//@RunWith(UnitilsJUnit4TestClassRunner.class)
+//@SpringApplicationContext({"/eu/etaxonomy/cdm/applicationContextSecurity.xml"})
+//@Transactional
+@Ignore // should be ignored
+@DataSet("SecurityTest.xml")
+public class SecurityWithTransaction extends CdmTransactionalIntegrationTestWithSecurity {
+
+ @SuppressWarnings("unused")
+ private static final Logger logger = Logger.getLogger(SecurityWithTransaction.class);
+
+ @SpringBeanByName
+ private ITaxonService taxonService;
+
+ @SpringBeanByName
+ private IDescriptionService descriptionService;
+
+ @SpringBeanByName
+ private ITaxonNodeService taxonNodeService;
+
+ @SpringBeanByName
+ private IUserService userService;
+
+
+ @TestDataSource
+ protected DataSource dataSource;
+
+ private Authentication authentication;
+
+ @SpringBeanByName
+ private AuthenticationManager authenticationManager;
+
+ @SpringBeanByName
+ private CdmPermissionEvaluator permissionEvaluator;
+
+ private UsernamePasswordAuthenticationToken token;
+
+
+    @Before
+    public void setUp(){
+        // default credentials; individual tests overwrite this token as needed
+        token = new UsernamePasswordAuthenticationToken("admin", "sPePhAz6");
+    }
+
+    @Test
+    public void testDeleteTaxon(){
+        // authenticate as a user who may delete taxa
+        token = new UsernamePasswordAuthenticationToken("taxonomist", "test4");
+        authentication = authenticationManager.authenticate(token);
+        SecurityContext context = SecurityContextHolder.getContext();
+        context.setAuthentication(authentication);
+        Taxon actualTaxon = (Taxon)taxonService.find(UUID.fromString("7b8b5cb3-37ba-4dba-91ac-4c6ffd6ac331"));
+
+        //try {
+        DeleteResult result = taxonService.deleteTaxon(actualTaxon.getUuid(), null, null);
+        /*} catch (DataChangeNoRollbackException e) {
+            Assert.fail();
+        }*/
+        // deletion must succeed for an authorized user
+        if (!result.isOk()){
+            Assert.fail();
+        }
+    }
+
+
+    @Test
+    public void testSaveOrUpdateDescription(){
+
+        // authenticate as a user who may edit descriptions
+        authentication = authenticationManager.authenticate(new UsernamePasswordAuthenticationToken("descriptionEditor", "test"));
+        SecurityContext context = SecurityContextHolder.getContext();
+        context.setAuthentication(authentication);
+        /*Taxon taxon = (Taxon) taxonService.load(UUID.fromString("7b8b5cb3-37ba-4dba-91ac-4c6ffd6ac331"));
+
+        Set<TaxonDescription> descriptions = taxon.getDescriptions();
+
+        Iterator<TaxonDescription> iterator = descriptions.iterator();
+
+        TaxonDescription description = iterator.next();*/
+        TaxonDescription description = (TaxonDescription) descriptionService.find(UUID.fromString("eb17b80a-9be6-4642-a6a8-b19a318925e6"));
+
+        // add a new ecology text element (with media) to the description
+        TextData textData = new TextData();
+        textData.setFeature(Feature.ECOLOGY());
+        Media media = Media.NewInstance();
+        textData.addMedia(media);
+
+
+
+        //descriptionService.saveDescriptionElement(textData);
+        description.addElement(textData);
+
+        descriptionService.saveOrUpdate(description);
+
+        // reload the taxon and verify the element was persisted
+        Taxon taxon = (Taxon) taxonService.find(UUID.fromString("7b8b5cb3-37ba-4dba-91ac-4c6ffd6ac331"));
+        Set<TaxonDescription> descriptions = taxon.getDescriptions();
+
+        Iterator<TaxonDescription> iterator = descriptions.iterator();
+
+        description = iterator.next();
+        assertEquals(1, descriptions.size());
+        assertEquals(2,description.getElements().size());
+
+
+
+    }
+
+    @Test
+    public void testAllowOnlyAccessToPartOfTree(){
+        // partEditor may only update a subtree, not the tribe-level root node
+        authentication = authenticationManager.authenticate(new UsernamePasswordAuthenticationToken("partEditor", "test4"));
+        SecurityContext context = SecurityContextHolder.getContext();
+        context.setAuthentication(authentication);
+
+        Taxon tribe = (Taxon)taxonService.find(UUID.fromString("928a0167-98cd-4555-bf72-52116d067625"));
+        Taxon taxon = (Taxon)taxonService.find(UUID.fromString("bc09aca6-06fd-4905-b1e7-cbf7cc65d783"));
+        Iterator<TaxonNode> it = tribe.getTaxonNodes().iterator();
+        TaxonNode node = it.next();
+
+        // no permission on the root node itself
+        assertFalse(permissionEvaluator.hasPermission(authentication, node, "UPDATE"));
+        node = node.getChildNodes().iterator().next();
+
+        System.err.println(node.getUuid());
+
+        // permission granted on descendants of the root node
+        assertTrue(permissionEvaluator.hasPermission(authentication, node, "UPDATE"));
+        node = node.getChildNodes().iterator().next();
+        assertTrue(permissionEvaluator.hasPermission(authentication, node, "UPDATE"));
+        TaxonDescription description = TaxonDescription.NewInstance(taxon);
+
+        taxonNodeService.saveOrUpdate(node);
+        // descriptions are not covered by the subtree grant
+        assertFalse(permissionEvaluator.hasPermission(authentication, description, "UPDATE"));
+
+
+    }
+
+    /* (non-Javadoc)
+     * @see eu.etaxonomy.cdm.test.integration.CdmIntegrationTest#createTestDataSet()
+     */
+    @Override
+    public void createTestDataSet() throws FileNotFoundException {
+        // TODO Auto-generated method stub
+
+    }
+
+
+}
import org.junit.Ignore;
import org.junit.Test;
import org.springframework.jdbc.datasource.TransactionAwareDataSourceProxy;
-import org.springframework.orm.hibernate4.HibernateSystemException;
-import org.springframework.orm.hibernate4.SessionHolder;
+import org.springframework.orm.hibernate5.HibernateSystemException;
+import org.springframework.orm.hibernate5.SessionHolder;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionSynchronizationManager;
@Override
public void createTestDataSet() throws FileNotFoundException {
// TODO Auto-generated method stub
-
+
}
import eu.etaxonomy.cdm.model.name.BotanicalName;
import eu.etaxonomy.cdm.model.name.HybridRelationshipType;
import eu.etaxonomy.cdm.model.name.NameRelationshipType;
-import eu.etaxonomy.cdm.model.name.NomenclaturalCode;
import eu.etaxonomy.cdm.model.name.Rank;
import eu.etaxonomy.cdm.model.name.TaxonNameBase;
import eu.etaxonomy.cdm.model.reference.Reference;
String server = "localhost";
String database = (schema == DbSchemaValidation.VALIDATE ? "cdm35" : "cdm36");
- database = "testCategoricalData";
+ database = "test";
// database = "350_editor_test";
String username = "edit";
dataSource = CdmDataSource.NewMySqlInstance(server, database, username, AccountStore.readOrStorePassword(server, database, username, null));
//// dataSource = CdmDataSource.NewSqlServer2005Instance(server, database, port, username, AccountStore.readOrStorePassword(server, database, username, null));
//
//H2
- String path = "C:\\Users\\a.mueller\\eclipse\\svn\\cdmlib-trunk\\cdmlib-remote-webapp\\src\\test\\resources\\h2";
-// String path = "C:\\Users\\pesiimport\\.cdmLibrary\\writableResources\\h2\\LocalH2_test34";
- username = "sa";
- dataSource = CdmDataSource.NewH2EmbeddedInstance("cdmTest", username, "", path, NomenclaturalCode.ICNAFP);
+// String path = "C:\\Users\\pesiimport\\.cdmLibrary\\writableResources\\h2\\LocalH2_test34";
+// String path = "C:\\Users\\a.mueller\\eclipse\\svn\\cdmlib-trunk\\cdmlib-remote-webapp\\src\\test\\resources\\h2";
+// username = "sa";
+// dataSource = CdmDataSource.NewH2EmbeddedInstance("cdmTest", username, "", path, NomenclaturalCode.ICNAFP);
+
// dataSource = CdmDataSource.NewH2EmbeddedInstance(database, username, "sa", NomenclaturalCode.ICNAFP);
// try {
//CdmPersistentDataSource.save(dataSource.getName(), dataSource);
CdmApplicationController appCtr;
- appCtr = CdmApplicationController.NewInstance(dataSource,schema);
+ appCtr = CdmApplicationController.NewInstance(dataSource, schema);
- for (int i= 1; i<10; i++){
+ logger.warn("Start adding persons");
+ for (int i= 1; i<100; i++){
addPerson(appCtr);
+ logger.warn("Added "+ i);
}
+ int n = appCtr.getAgentService().count(null);
+ logger.warn("End adding " + n + " persons");
// appCtr.getCommonService().createFullSampleData();
// insertSomeData(appCtr);
// deleteHighLevelNode(appCtr); //->problem with Duplicate Key in Classification_TaxonNode
appCtr.close();
+ System.exit(0);
}
<!-- TODO trying to use a managed session context
<prop key="hibernate.current_session_context_class">managed</prop>
-->
-<!-- temp for h4 migration testing -->
-<!-- <prop key="hibernate.connection.driver_class">org.h2.Driver</prop> -->
-<!-- <prop key="hibernate.connection.url">jdbc:h2:mem:cdm</prop> -->
-<!-- <prop key="hibernate.connection.username">sa</prop> -->
-<!-- <prop key="hibernate.connection.password"></prop> -->
-<!-- <prop key="hibernate.search.lucene_version">LUCENE_36</prop> -->
-<!-- end temp for h4 migration testing -->
+
<prop key="hibernate.search.default.indexBase">./target/index</prop>
<!-- hibernate.search.default.exclusive_index_use=false
locks must not only be released after application shutdown in test environment -->
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<beans xmlns="http://www.springframework.org/schema/beans"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xmlns:context="http://www.springframework.org/schema/context"
- xmlns:tx="http://www.springframework.org/schema/tx"
- xmlns:security="http://www.springframework.org/schema/security"
- xsi:schemaLocation="http://www.springframework.org/schema/beans
- http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
- http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx-4.0.xsd
- http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-4.0.xsd
- http://www.springframework.org/schema/security
- http://www.springframework.org/schema/security/spring-security-3.0.4.xsd">
-
- <!--
-
- ************************************************************************
- * *
- * DETLETE this file, only kept as reference, replaced by *
- * *-securityTest.xml *
- * *
- ************************************************************************
- -->
-
- <!-- <import resource="classpath:/eu/etaxonomy/cdm/defaultSecurityContext.xml"/> -->
-
- <!--<import resource="classpath:/eu/etaxonomy/cdm/persistence.xml"/> -->
-
- <context:component-scan base-package="eu/etaxonomy/cdm/persistence_security">
- <context:exclude-filter type="regex" expression="eu\.etaxonomy\.cdm\.persistence\.dao\.hibernate\.taxon\.TaxonAlternativeSpellingSuggestionParser"/>
- </context:component-scan>
- <context:component-scan base-package="eu/etaxonomy/cdm/database"/>
-
- <!-- includes service beans e.g. in eu/etaxonomy/cdm/api -->
- <context:component-scan base-package="eu/etaxonomy/cdm/api/service">
- <!-- <context:exclude-filter type="regex" expression="eu\.etaxonomy\.cdm\.api\.service\.UserService"/> -->
- </context:component-scan>
-
- <context:component-scan base-package="eu/etaxonomy/cdm/api/application"/>
-
- <!-- enable the configuration of transactional behavior based on annotations -->
- <tx:annotation-driven transaction-manager="transactionManager"/>
-
- <bean id="transactionManager"
- class="org.springframework.orm.hibernate4.HibernateTransactionManager">
- <property name="sessionFactory" ref="sessionFactory"/>
- </bean>
-
- <bean id="sessionFactory" class="org.springframework.orm.hibernate4.LocalSessionFactoryBean">
- <property name="namingStrategy">
- <bean class="org.hibernate.cfg.DefaultComponentSafeNamingStrategy" />
- </property>
-
- <property name="configLocation" value="classpath:eu/etaxonomy/cdm/hibernate.cfg.xml"/>
- <property name="configurationClass" value="org.hibernate.cfg.AnnotationConfiguration"/>
- <property name="dataSource" ref="dataSource"/>
- <property name="hibernateProperties" ref="hibernateProperties"/>
- </bean>
-
-
- <bean id="dataSource" class="org.unitils.database.UnitilsDataSourceFactoryBean"/>
-
- <bean id="hibernateProperties"
- class="org.springframework.beans.factory.config.PropertiesFactoryBean">
- <property name="properties">
- <props>
- <prop key="hibernate.dialect">org.hibernate.dialect.HSQLCorrectedDialect</prop>
- <prop key="hibernate.cache.region.factory_class">org.hibernate.cache.internal.NoCachingRegionFactory</prop>
- <!-- TODO trying to use a managed session context -->
- <!-- <prop key="hibernate.current_session_context_class">managed</prop>-->
- <prop key="hibernate.show_sql">false</prop>
- <prop key="hibernate.format_sql">false</prop>
- <prop key="hibernate.search.default.directory_provider">org.hibernate.search.store.impl.FSDirectoryProvider</prop>
- <prop key="hibernate.search.default.indexBase">./target/test-classes</prop>
- </props>
- </property>
- </bean>
-
- <bean id="authenticationManager" class="org.springframework.security.authentication.ProviderManager">
- <constructor-arg>
- <list>
- <bean class="org.springframework.security.authentication.TestingAuthenticationProvider"/>
- <ref local="daoAuthenticationProvider"/>
- </list>
- </constructor-arg>
- </bean>
-
- <bean id="daoAuthenticationProvider" class="org.springframework.security.authentication.dao.DaoAuthenticationProvider">
- <property name="userDetailsService" ref="userService"/>
- <property name="saltSource" ref="saltSource"/>
- <property name="passwordEncoder" ref="passwordEncoder"/>
- </bean>
-
- <bean id="passwordEncoder" class="org.springframework.security.authentication.encoding.Md5PasswordEncoder"/>
-
- <bean id="saltSource" class="org.springframework.security.authentication.dao.ReflectionSaltSource">
- <property name="userPropertyToUse" value="getUsername"/>
- </bean>
-
- <security:global-method-security pre-post-annotations="enabled">
- <security:expression-handler ref="expressionHandler"/>
- </security:global-method-security>
-
- <bean id="expressionHandler" class="org.springframework.security.access.expression.method.DefaultMethodSecurityExpressionHandler">
- <property name="permissionEvaluator" ref="cdmPermissionEvaluator"/>
- </bean>
-
-
- <bean class="eu.etaxonomy.cdm.permission.CdmPermissionEvaluator" id="cdmPermissionEvaluator"/>
-
- <bean id="userService" class="eu.etaxonomy.cdm.api.service.UserService">
-
- </bean>
-
-
- <bean id="groupService" class="eu.etaxonomy.cdm.api.service.GroupServiceImpl">
-
- </bean>
-
-
- <bean id="titleAndNameCacheAutoInitializer" class="eu.etaxonomy.cdm.persistence.dao.initializer.TitleAndNameCacheAutoInitializer"></bean>
-
- <bean id="defaultBeanInitializer" class="eu.etaxonomy.cdm.persistence.dao.hibernate.HibernateBeanInitializer">
- <property name="beanAutoInitializers">
- <map>
- <entry key="eu.etaxonomy.cdm.model.common.IdentifiableEntity" value-ref="titleAndNameCacheAutoInitializer" />
- </map>
- </property>
- </bean>
-</beans>
\ No newline at end of file
#log4j.logger.org.hibernate.engine.transaction=debug
#log4j.logger.org.springframework.transaction.support.TransactionSynchronizationManager=trace
#log4j.logger.org.springframework.orm.hibernate3.HibernateTransactionManager=trace
-#log4j.logger.org.springframework.orm.hibernate4.HibernateTransactionManager=trace
-#log4j.logger.org.springframework.orm.hibernate4=trace
+#log4j.logger.org.springframework.orm.hibernate5.HibernateTransactionManager=trace
+#log4j.logger.org.springframework.orm.hibernate5=trace
#log4j.logger.org.springframework.orm.hibernate3=trace
### enable logging if you want to track what is happening in the database ###
-#\r
-# NOTE: this is the unitils.properties file for the cdmlib-services module\r
-# a separate unitils.properties exists for cdmlib-io and for cdmlib-persistence\r
-#\r
-#\r
-# the list of all properties is found in\r
-# http://unitils.org/unitils-default.properties\r
-#\r
-\r
-### Unitils Modules ###\r
-# List of modules that is loaded. Overloading this list is normally not useful, unless you want to add a custom\r
-# module. Disabling a module can be performed by setting unitils.module.<modulename>.enabled to false.\r
-# If a module's specific dependencies are not found (e.g. hibernate is not in you classpath), this module is not loaded,\r
-# even if it is in this list and the enabled property is set to true. It's therefore not strictly necessary to disable\r
-# any of these modules.\r
-#DEFAULT: unitils.modules=database,dbunit,hibernate,mock,easymock,inject,spring,jpa\r
-unitils.module.easymock.enabled=false\r
-unitils.module.mock.enabled=false\r
-\r
-\r
-### Database ###\r
-#\r
-# Name or path of the user specific properties file. This file should contain the necessary parameters to connect to the\r
-# developer's own unit test schema. It is recommended to override the name of this file in the project specific properties\r
-# file, to include the name of the project. The system will try to find this file in the classpath, the user home folder\r
-# (recommended) or the local filesystem.\r
-unitils.configuration.localFileName=unitils-cdmlib-local.properties\r
-#\r
-updateDataBaseSchema.enabled=true\r
-#\r
-dataSetStructureGenerator.xsd.dirName=src/test/resources/eu/etaxonomy/cdm/service/\r
-#\r
-dbMaintainer.script.locations=src/test/resources/dbscripts\r
-dbMaintainer.dbVersionSource.autoCreateVersionTable=true\r
-dbMaintainer.autoCreateExecutedScriptsTable=true\r
-\r
-\r
-#\r
-# default database configurations for HSQL, H2 and MYSQL:\r
-#\r
-\r
-# HSQL #\r
-#\r
-# NOTE: hsqldb v. <1.9 has a bug (http://sourceforge.net/tracker/?func=detail&atid=378131&aid=1407528&group_id=23316)\r
-# due to which it is not possible to use batch updates, so we need to disable hibernate batching when using\r
-# hsqldb in the app context:\r
-#\r
-#database.driverClassName=org.hsqldb.jdbcDriver\r
-#database.url=jdbc:hsqldb:mem:cdm\r
-##database.url=jdbc:hsqldb:file:/home/andreas/.hsqldb/cdm;hsqldb.default_table_type=cached\r
-#database.dialect=hsqldb\r
-#dbMaintainer.script.fileExtensions=hsqldb.ddl\r
-#database.userName=sa\r
-#database.password=\r
-#database.schemaNames=PUBLIC\r
-#org.dbunit.dataset.datatype.IDataTypeFactory.implClassName.hsqldb=org.dbunit.ext.hsqldb.HsqldbDataTypeFactory\r
-\r
-# H2 #\r
-#\r
-database.driverClassName=org.h2.Driver\r
-#database.driverClassName=com.p6spy.engine.spy.P6SpyDriver\r
-database.url=jdbc:h2:mem:cdm2\r
-#database.url=jdbc:h2:file:~/.h2/cdm;TRACE_LEVEL_FILE=2\r
-database.dialect=h2\r
-dbMaintainer.script.fileExtensions=h2.sql\r
-database.userName=sa\r
-database.password=\r
-database.schemaNames=PUBLIC\r
-org.dbunit.dataset.datatype.IDataTypeFactory.implClassName.h2=org.dbunit.ext.h2.H2DataTypeFactory\r
-#org.dbunit.dataset.datatype.IDataTypeFactory.implClassName=org.dbunit.ext.h2.H2DataTypeFactory\r
-org.unitils.core.dbsupport.DbSupport.implClassName.h2=eu.etaxonomy.cdm.database.H2DbSupport\r
-database.storedIndentifierCase.h2=auto\r
-database.identifierQuoteString.h2=auto\r
-\r
-#\r
-# MySQL #\r
-# cannot be used\r
-# 1. you need another ddl or sql file to generate the schema\r
-# 2. our DataSet xml files and the dataset.dtd contain upper-case table names like\r
-# 'TERMVOCABULARY_REPRESENTATION_AUD' but hibernate expects CamelCase with mysql !!!!\r
-# maybe we can workaround this by implementing a custom EJB3NamingStrategy like\r
-# UppercaseTableNamingStrategy it must be set in the persitence.xml which returns table names\r
-# always in uppercase, but this again causes problems with the @Tabel annotations which must\r
-# exactly match the table names in the database. This means in order to use mysql for tests we would\r
-# have to change the table names in all of our DataSet files.\r
-#\r
-#database.driverClassName=com.mysql.jdbc.Driver\r
-#database.url=jdbc:mysql://127.0.0.1:3306/test\r
-#database.userName=edit\r
-#database.password=wp5\r
-#database.dialect=mysql\r
-#database.schemaNames=test\r
-#dbMaintainer.script.fileExtensions=mysql.sql\r
-#org.dbmaintain.dbsupport.DbSupport.implClassName.mysql=org.dbmaintain.dbsupport.impl.MySqlDbSupport\r
-#org.dbunit.dataset.datatype.IDataTypeFactory.implClassName.mysqldb=org.dbunit.ext.mysql.MySqlDataTypeFactory\r
-\r
-\r
-#### Transaction mode ###\r
-#\r
-# If set to commit or rollback, each test is run in a transaction,\r
-# which is committed or rolled back after the test is finished.\r
-# Since we have many tests with incomplete data these tests would be\r
-# failing during commit so it is better use rollback as default\r
-# and set commit for individual test where necessary\r
-DatabaseModule.Transactional.value.default=rollback\r
-\r
-# org.unitils.database.transaction.impl.DefaultUnitilsTransactionManager is used by default:\r
-#\r
-# Implements transactions for unit tests, by delegating to a spring PlatformTransactionManager.\r
-# The concrete implementation of PlatformTransactionManager that is used depends on the test class.\r
-# If a custom PlatformTransactionManager was configured in a spring ApplicationContext, this one is used.\r
-# If not, a suitable subclass of PlatformTransactionManager is created, depending on the configuration\r
-# of a test. E.g. if some ORM persistence unit was configured on the test, a PlatformTransactionManager\r
-# that can offer transactional behavior for such a persistence unit is used.\r
-# If no such configuration is found, a DataSourceTransactionManager is used.\r
-#\r
-# org.unitils.database.transaction.UnitilsTransactionManager.implClassName=org.unitils.database.transaction.impl.SpringTransactionManager\r
-\r
-### Hibernate ###\r
-#\r
-HibernateModule.configuration.implClassName=org.hibernate.cfg.Configuration\r
+#
+# NOTE: this is the unitils.properties file for the cdmlib-services module
+# a separate unitils.properties exists for cdmlib-io and for cdmlib-persistence
+#
+#
+# the list of all properties is found in
+# http://unitils.org/unitils-default.properties
+#
+
+### Unitils Modules ###
+# List of modules that is loaded. Overloading this list is normally not useful, unless you want to add a custom
+# module. Disabling a module can be performed by setting unitils.module.<modulename>.enabled to false.
+# If a module's specific dependencies are not found (e.g. hibernate is not in your classpath), this module is not loaded,
+# even if it is in this list and the enabled property is set to true. It's therefore not strictly necessary to disable
+# any of these modules.
+#DEFAULT: unitils.modules=database,dbunit,hibernate,mock,easymock,inject,spring,jpa
+unitils.module.easymock.enabled=false
+unitils.module.mock.enabled=false
+
+
+### Database ###
+#
+# Name or path of the user specific properties file. This file should contain the necessary parameters to connect to the
+# developer's own unit test schema. It is recommended to override the name of this file in the project specific properties
+# file, to include the name of the project. The system will try to find this file in the classpath, the user home folder
+# (recommended) or the local filesystem.
+unitils.configuration.localFileName=unitils-cdmlib-local.properties
+#
+updateDataBaseSchema.enabled=true
+#
+dataSetStructureGenerator.xsd.dirName=src/test/resources/eu/etaxonomy/cdm/service/
+#
+dbMaintainer.script.locations=src/test/resources/dbscripts
+dbMaintainer.dbVersionSource.autoCreateVersionTable=true
+dbMaintainer.autoCreateExecutedScriptsTable=true
+
+
+#
+# default database configurations for HSQL, H2 and MYSQL:
+#
+
+# HSQL #
+#
+# NOTE: hsqldb v. <1.9 has a bug (http://sourceforge.net/tracker/?func=detail&atid=378131&aid=1407528&group_id=23316)
+# due to which it is not possible to use batch updates, so we need to disable hibernate batching when using
+# hsqldb in the app context:
+#
+#database.driverClassName=org.hsqldb.jdbcDriver
+#database.url=jdbc:hsqldb:mem:cdm
+##database.url=jdbc:hsqldb:file:/home/andreas/.hsqldb/cdm;hsqldb.default_table_type=cached
+#database.dialect=hsqldb
+#dbMaintainer.script.fileExtensions=hsqldb.ddl
+#database.userName=sa
+#database.password=
+#database.schemaNames=PUBLIC
+#org.dbunit.dataset.datatype.IDataTypeFactory.implClassName.hsqldb=org.dbunit.ext.hsqldb.HsqldbDataTypeFactory
+
+# H2 #
+#
+database.driverClassName=org.h2.Driver
+#database.driverClassName=com.p6spy.engine.spy.P6SpyDriver
+database.url=jdbc:h2:mem:cdm2
+#database.url=jdbc:h2:file:~/.h2/cdm;TRACE_LEVEL_FILE=2
+database.dialect=h2
+dbMaintainer.script.fileExtensions=h2.sql
+database.userName=sa
+database.password=
+database.schemaNames=PUBLIC
+org.dbunit.dataset.datatype.IDataTypeFactory.implClassName.h2=org.dbunit.ext.h2.H2DataTypeFactory
+#org.dbunit.dataset.datatype.IDataTypeFactory.implClassName=org.dbunit.ext.h2.H2DataTypeFactory
+org.unitils.core.dbsupport.DbSupport.implClassName.h2=eu.etaxonomy.cdm.database.H2DbSupport
+database.storedIndentifierCase.h2=auto
+database.identifierQuoteString.h2=auto
+
+#
+# MySQL #
+# cannot be used
+# 1. you need another ddl or sql file to generate the schema
+# 2. our DataSet xml files and the dataset.dtd contain upper-case table names like
+# 'TERMVOCABULARY_REPRESENTATION_AUD' but hibernate expects CamelCase with mysql !!!!
+# maybe we can workaround this by implementing a custom EJB3NamingStrategy like
+# UppercaseTableNamingStrategy; it must be set in the persistence.xml which returns table names
+# always in uppercase, but this again causes problems with the @Table annotations which must
+# exactly match the table names in the database. This means in order to use mysql for tests we would
+# have to change the table names in all of our DataSet files.
+#
+#database.driverClassName=com.mysql.jdbc.Driver
+#database.url=jdbc:mysql://127.0.0.1:3306/test
+#database.userName=edit
+#database.password=wp5
+#database.dialect=mysql
+#database.schemaNames=test
+#dbMaintainer.script.fileExtensions=mysql.sql
+#org.dbmaintain.dbsupport.DbSupport.implClassName.mysql=org.dbmaintain.dbsupport.impl.MySqlDbSupport
+#org.dbunit.dataset.datatype.IDataTypeFactory.implClassName.mysqldb=org.dbunit.ext.mysql.MySqlDataTypeFactory
+
+
+#### Transaction mode ###
+#
+# If set to commit or rollback, each test is run in a transaction,
+# which is committed or rolled back after the test is finished.
+# Since we have many tests with incomplete data these tests would be
+# failing during commit so it is better to use rollback as default
+# and set commit for individual test where necessary
+DatabaseModule.Transactional.value.default=rollback
+
+# org.unitils.database.transaction.impl.DefaultUnitilsTransactionManager is used by default:
+#
+# Implements transactions for unit tests, by delegating to a spring PlatformTransactionManager.
+# The concrete implementation of PlatformTransactionManager that is used depends on the test class.
+# If a custom PlatformTransactionManager was configured in a spring ApplicationContext, this one is used.
+# If not, a suitable subclass of PlatformTransactionManager is created, depending on the configuration
+# of a test. E.g. if some ORM persistence unit was configured on the test, a PlatformTransactionManager
+# that can offer transactional behavior for such a persistence unit is used.
+# If no such configuration is found, a DataSourceTransactionManager is used.
+#
+# org.unitils.database.transaction.UnitilsTransactionManager.implClassName=org.unitils.database.transaction.impl.SpringTransactionManager
+
+### Hibernate ###
+#
+HibernateModule.configuration.implClassName=org.hibernate.cfg.Configuration
+
+## Same as defined in hibernate.cfg.xml.
+hibernate.connection.provider_class=org.hibernate.engine.jdbc.connections.internal.DatasourceConnectionProviderImpl
+
+#Created for hibernate 5 support, not yet available in standard unitils
+unitils.module.hibernate.className=org.unitils.orm.hibernate.Hibernate5Module
+
+#Since Hibernate 5 (or earlier > 4.1.10) using the Proxy does not correctly release
+#the connections to the pool after transaction finishes.
+dataSource.wrapInTransactionalProxy=false
<modules>
<module>cdmlib-commons</module>
<module>cdmlib-model</module>
+ <module>cdmlib-test</module>
<module>cdmlib-persistence</module>
<module>cdmlib-services</module>
<module>cdmlib-ext</module>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<java.codelevel>1.6</java.codelevel>
<spring.version>4.2.2.RELEASE</spring.version>
- <spring-security.version>4.0.2.RELEASE</spring-security.version>
- <hibernate.version>4.3.11.Final</hibernate.version>
-<!-- <hibernate.version>5.0.2.Final</hibernate.version> -->
+ <spring-security.version>4.0.3.RELEASE</spring-security.version>
<hibernate-validator.version>5.2.2.Final</hibernate-validator.version>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
- <unitils.version>3.4.2</unitils.version>
- <lucene.version>3.6.2</lucene.version>
+ <hibernate.version>5.0.3.Final</hibernate.version>
+ <hibernate-search.version>5.5.0.Final</hibernate-search.version>
+ <lucene.version>5.3.1</lucene.version>
+ <unitils.version>3.4.2</unitils.version>
<hsqldb.version>2.3.3</hsqldb.version>
<!-- <hsqldb.version>1.8.0.10</hsqldb.version> -->
<!-- 1.8.0.10, 1.9.0-rc6 (manually build only for debugging) -->
<groupId>org.unitils</groupId>
<artifactId>unitils-database</artifactId>
<version>${unitils.version}</version>
- <scope>test</scope>
+ <!-- <scope>test</scope> -->
<exclusions>
<exclusion>
<groupId>org.springframework</groupId>
<groupId>org.unitils</groupId>
<artifactId>unitils-dbmaintainer</artifactId>
<version>${unitils.version}</version>
- <scope>test</scope>
+ <!-- <scope>test</scope> -->
<exclusions>
<exclusion>
<groupId>org.hibernate</groupId>
<groupId>org.unitils</groupId>
<artifactId>unitils-dbunit</artifactId>
<version>${unitils.version}</version>
- <scope>test</scope>
+ <!-- <scope>test</scope> -->
</dependency>
<dependency>
<groupId>org.unitils</groupId>
<artifactId>unitils-easymock</artifactId>
<version>${unitils.version}</version>
- <scope>test</scope>
+ <!-- <scope>test</scope> -->
</dependency>
<dependency>
<groupId>org.unitils</groupId>
<artifactId>unitils-mock</artifactId>
<version>${unitils.version}</version>
- <scope>test</scope>
+ <!-- <scope>test</scope> -->
</dependency>
<dependency>
<groupId>org.unitils</groupId>
<artifactId>unitils-inject</artifactId>
<version>${unitils.version}</version>
- <scope>test</scope>
+ <!-- <scope>test</scope> -->
</dependency>
<dependency>
<groupId>org.unitils</groupId>
<artifactId>unitils-orm</artifactId>
<version>${unitils.version}</version>
- <scope>test</scope>
+ <!-- <scope>test</scope> -->
<exclusions>
<exclusion>
<groupId>javax.persistence</groupId>
<groupId>xmlunit</groupId>
<artifactId>xmlunit</artifactId>
<version>1.6</version>
- <scope>test</scope>
+ <!-- <scope>test</scope> -->
</dependency>
<dependency>
<groupId>org.dbunit</groupId>
<artifactId>dbunit</artifactId>
<version>2.4.9</version>
- <scope>test</scope>
+ <!-- <scope>test</scope> -->
</dependency>
- <!-- dependency of unitils database, but with lower number there ,
+ <!-- dependency of unitils-database, but with lower number there ,
higher number needed for compatibility with current hibernate and Java 1.6
PreparedStatement -->
<dependency>
<version>1.4</version>
<!-- <scope>test</scope> -->
</dependency>
+ <dependency>
+ <groupId>eu.etaxonomy</groupId>
+ <artifactId>cdmlib-test</artifactId>
+ <scope>test</scope>
+ <version>${project.version}</version>
+ </dependency>
<!-- ******* aspect ******* -->
<dependency>
<artifactId>usertype.jodatime</artifactId>
<version>2.0.1</version>
</dependency>
-
+
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-core</artifactId>
<version>${lucene.version}</version>
</dependency>
<!-- OLD concept, not under development anymore,
to be removed as soon as CdmMassIndexer is updated -->
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-spellchecker</artifactId>
<version>3.6.2</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.lucene</groupId>
+ <artifactId>lucene-suggest</artifactId>
+ <version>${lucene.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.lucene</groupId>
+ <artifactId>lucene-queryparser</artifactId>
+ <version>${lucene.version}</version>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-c3p0</artifactId>
<version>${hibernate.version}</version>
- <exclusions>
- <!-- version 0.9.1.x has a bug in NewProxyPreparedStatement therefore
- we use com.mchange/c3po 0.9.2 instead -->
- <exclusion>
- <groupId>c3p0</groupId>
- <artifactId>c3p0</artifactId>
- </exclusion>
- </exclusions>
</dependency>
<dependency>
<groupId>com.mchange</groupId>
<artifactId>spring-jdbc</artifactId>
<version>${spring.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.springframework</groupId>
+ <artifactId>spring-tx</artifactId>
+ <version>${spring.version}</version>
+ </dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>