/**\r
* Copyright (C) 2007 EDIT\r
-* European Distributed Institute of Taxonomy \r
+* European Distributed Institute of Taxonomy\r
* http://www.e-taxonomy.eu\r
-* \r
+*\r
* The contents of this file are subject to the Mozilla Public License Version 1.1\r
* See LICENSE.TXT at the top of this package for the full license terms.\r
*/\r
*\r
*/\r
public abstract class CsvDemoBase extends CdmExportBase<CsvDemoExportConfigurator, CsvDemoExportState, IExportTransformer> implements ICdmExport<CsvDemoExportConfigurator, CsvDemoExportState>{\r
- private static final Logger logger = Logger.getLogger(CsvDemoBase.class);\r
- \r
+ private static final long serialVersionUID = -2962456879635841019L;\r
+\r
+ private static final Logger logger = Logger.getLogger(CsvDemoBase.class);\r
+\r
protected static final boolean IS_CORE = true;\r
- \r
- \r
- protected Set<Integer> existingRecordIds = new HashSet<Integer>();\r
- protected Set<UUID> existingRecordUuids = new HashSet<UUID>();\r
- \r
- \r
-\r
- /* (non-Javadoc)\r
- * @see eu.etaxonomy.cdm.io.common.CdmIoBase#countSteps()\r
- */\r
+\r
+\r
+ protected Set<Integer> existingRecordIds = new HashSet<>();\r
+ protected Set<UUID> existingRecordUuids = new HashSet<>();\r
+\r
+\r
@Override\r
public int countSteps() {\r
List<TaxonNode> allNodes = getClassificationService().getAllNodes();\r
return allNodes.size();\r
}\r
\r
- \r
- \r
+\r
+\r
/**\r
- * Returns the list of {@link TaxonNode taxon nodes} that are part in one of the given {@link Classification classifications} \r
+* Returns the list of {@link TaxonNode taxon nodes} that are part of one of the given {@link Classification classifications}\r
* and do have a {@link Taxon} attached (empty taxon nodes should not but do exist in CDM databases).\r
- * If <code>classificationList</code> is <code>null</code> or empty then all {@link TaxonNode taxon nodes} of all \r
+ * If <code>classificationList</code> is <code>null</code> or empty then all {@link TaxonNode taxon nodes} of all\r
* {@link Classification classifications} are returned.<BR>\r
* Preliminary implementation. Better implement API method for this.\r
* @return\r
if (classificationList != null && classificationList.isEmpty()){\r
classificationList = null;\r
}\r
- \r
+\r
List<TaxonNode> allNodes = getClassificationService().getAllNodes();\r
List<TaxonNode> result = new ArrayList<TaxonNode>();\r
for (TaxonNode node : allNodes){\r
}\r
return result;\r
}\r
- \r
- \r
+\r
+\r
/**\r
* Creates the locationId, locality, countryCode triple\r
* @param record\r
protected String getTaxonLogString(TaxonBase<?> taxon) {\r
return taxon.getTitleCache() + "(" + taxon.getId() + ")";\r
}\r
- \r
+\r
\r
/**\r
* @param el\r
protected boolean recordExists(CdmBase el) {\r
return existingRecordIds.contains(el.getId());\r
}\r
- \r
+\r
\r
/**\r
* @param sec\r
protected void addExistingRecord(CdmBase cdmBase) {\r
existingRecordIds.add(cdmBase.getId());\r
}\r
- \r
+\r
/**\r
* @param el\r
* @return\r
protected boolean recordExistsUuid(CdmBase el) {\r
return existingRecordUuids.contains(el.getUuid());\r
}\r
- \r
+\r
/**\r
* @param sec\r
*/\r
protected void addExistingRecordUuid(CdmBase cdmBase) {\r
existingRecordUuids.add(cdmBase.getUuid());\r
}\r
- \r
+\r
\r
protected String getSources(ISourceable<?> sourceable, CsvTaxExportConfiguratorRedlist config) {\r
String result = "";\r
}\r
return result;\r
}\r
- \r
+\r
\r
/**\r
* @param config\r
FileOutputStream fos = new FileOutputStream(f);\r
return fos;\r
}\r
- \r
+\r
\r
/**\r
* @param config\r
*/\r
protected XMLStreamWriter createXmlStreamWriter(CsvTaxExportStateRedlist state, String fileName)\r
throws IOException, FileNotFoundException, XMLStreamException {\r
- XMLOutputFactory factory = XMLOutputFactory.newInstance(); \r
+ XMLOutputFactory factory = XMLOutputFactory.newInstance();\r
OutputStream os;\r
boolean useZip = state.isZip();\r
if (useZip){\r
XMLStreamWriter writer = factory.createXMLStreamWriter(os);\r
return writer;\r
}\r
- \r
+\r
\r
/**\r
* @param coreTaxFileName\r
* @throws FileNotFoundException\r
* @throws UnsupportedEncodingException\r
*/\r
- protected PrintWriter createPrintWriter(final String fileName, CsvTaxExportStateRedlist state) \r
+ protected PrintWriter createPrintWriter(final String fileName, CsvTaxExportStateRedlist state)\r
throws IOException, FileNotFoundException, UnsupportedEncodingException {\r
- \r
+\r
OutputStream os;\r
boolean useZip = state.isZip();\r
if (useZip){\r
os = createFileOutputStream(state.getConfig(), fileName);\r
}\r
PrintWriter writer = new PrintWriter(new OutputStreamWriter(os, "UTF8"), true);\r
- \r
+\r
return writer;\r
}\r
- \r
\r
\r
- \r
+\r
+\r
/**\r
* Closes the writer\r
* @param writer\r
writer.close();\r
}\r
}\r
- \r
\r
- \r
+\r
+\r
/**\r
* Closes the writer.\r
* Note: XMLStreamWriter does not close the underlying stream.\r
*\r
*/\r
public abstract class DwcaExportBase extends CdmExportBase<DwcaTaxExportConfigurator, DwcaTaxExportState, IExportTransformer> implements ICdmExport<DwcaTaxExportConfigurator, DwcaTaxExportState>{\r
+ private static final long serialVersionUID = -3214410418410044139L;\r
+\r
private static final Logger logger = Logger.getLogger(DwcaExportBase.class);\r
\r
protected static final boolean IS_CORE = true;\r
private String serverFileName = "-dwca_export-cdm.zip";\r
\r
\r
-\r
- /* (non-Javadoc)\r
- * @see eu.etaxonomy.cdm.io.common.CdmIoBase#countSteps()\r
- */\r
@Override\r
public int countSteps() {\r
List<TaxonNode> allNodes = getClassificationService().getAllNodes();\r
* @author n.hoffmann
* @created 24.09.2008
*/
-public abstract class AnnotatableDaoImpl<T extends AnnotatableEntity> extends VersionableDaoBase<T> implements IAnnotatableDao<T> {
- @SuppressWarnings("unused")
+public abstract class AnnotatableDaoImpl<T extends AnnotatableEntity>
+ extends VersionableDaoBase<T>
+ implements IAnnotatableDao<T> {
+
+ @SuppressWarnings("unused")
private static Logger logger = Logger.getLogger(AnnotatableDaoImpl.class);
/**
* @param start\r
* @param criteria\r
*/\r
- private void addLimitAndStart(Integer limit, Integer start, Criteria criteria) {\r
+ protected void addLimitAndStart(Integer limit, Integer start, Criteria criteria) {\r
if(limit != null) {\r
if(start != null) {\r
criteria.setFirstResult(start);\r
import eu.etaxonomy.cdm.persistence.query.OrderHint;
-public class IdentifiableDaoBase<T extends IdentifiableEntity> extends AnnotatableDaoImpl<T> implements IIdentifiableDao<T>{
+public class IdentifiableDaoBase<T extends IdentifiableEntity>
+ extends AnnotatableDaoImpl<T>
+ implements IIdentifiableDao<T>{
+
@SuppressWarnings("unused")
private static final Logger logger = Logger.getLogger(IdentifiableDaoBase.class);
\r
@Override\r
public Map<DescriptionBase, Set<DescriptionElementBase>> getDescriptionElements(WorkingSet workingSet, Set<Feature> features, Integer pageSize, Integer pageNumber, List<String> propertyPaths) {\r
- checkNotInPriorView("WorkingSetDao.getDescriptionElements(WorkingSet workingSet, Set<Feature> features, Integer pageSize,Integer pageNumber, List<OrderHint> orderHints, List<String> propertyPaths)");\r
- Query query = getSession().createQuery("select description from WorkingSet workingSet join workingSet.descriptions description order by description.titleCache asc");\r
+ checkNotInPriorView("WorkingSetDao.getDescriptionElements(WorkingSet workingSet, Set<Feature> features, Integer pageSize,Integer pageNumber, List<String> propertyPaths)");\r
+ Query query = getSession().createQuery("SELECT description FROM WorkingSet workingSet JOIN workingSet.descriptions description ORDER BY description.titleCache ASC");\r
\r
if(pageSize != null) {\r
query.setMaxResults(pageSize);\r
}\r
}\r
List<DescriptionBase> descriptions = query.list();\r
- Map<DescriptionBase, Set<DescriptionElementBase>> result = new HashMap<DescriptionBase, Set<DescriptionElementBase>>();\r
+ Map<DescriptionBase, Set<DescriptionElementBase>> result = new HashMap<>();\r
for(DescriptionBase description : descriptions) {\r
Criteria criteria = getSession().createCriteria(DescriptionElementBase.class);\r
criteria.add(Restrictions.eq("inDescription", description));\r
\r
List<DescriptionElementBase> r = criteria.list();\r
defaultBeanInitializer.initializeAll(r, propertyPaths);\r
- result.put(description, new HashSet<DescriptionElementBase>(r));\r
+ result.put(description, new HashSet<>(r));\r
}\r
return result;\r
}\r
*\r
* The contents of this file are subject to the Mozilla Public License Version 1.1\r
* See LICENSE.TXT at the top of this package for the full license terms.\r
-*/ \r
+*/\r
\r
package eu.etaxonomy.cdm.persistence.dao.hibernate.occurrence;\r
\r
@Repository\r
public class CollectionDaoHibernateImpl extends IdentifiableDaoBase<Collection> implements\r
ICollectionDao {\r
- \r
+\r
public CollectionDaoHibernateImpl() {\r
super(Collection.class);\r
indexedClasses = new Class[1];\r
indexedClasses[0] = Collection.class;\r
}\r
\r
- public List<Collection> getCollectionByCode(String code) {\r
+ @Override\r
+ public List<Collection> getCollectionByCode(String code) {\r
AuditEvent auditEvent = getAuditEventFromContext();\r
if(auditEvent.equals(AuditEvent.CURRENT_VIEW)) {\r
Criteria crit = getSession().createCriteria(Collection.class);\r
crit.add(Restrictions.eq("code", code));\r
- \r
- return (List<Collection>)crit.list();\r
+\r
+ return crit.list();\r
} else {\r
AuditQuery query = getAuditReader().createQuery().forEntitiesAtRevision(Collection.class,auditEvent.getRevisionNumber());\r
query.add(AuditEntity.property("code").eq(code));\r
- return (List<Collection>)query.getResultList();\r
+ return query.getResultList();\r
}\r
}\r
\r
@Override\r
public void rebuildIndex() {\r
- FullTextSession fullTextSession = Search.getFullTextSession(getSession());\r
- \r
- for(Collection collection : list(null,null)) { // re-index all taxon base\r
+ FullTextSession fullTextSession = Search.getFullTextSession(getSession());\r
\r
- Hibernate.initialize(collection.getSuperCollection());\r
- Hibernate.initialize(collection.getInstitute());\r
- fullTextSession.index(collection);\r
- }\r
- fullTextSession.flushToIndexes();\r
+ for(Collection collection : list(null,null)) { // re-index all taxon base\r
+\r
+ Hibernate.initialize(collection.getSuperCollection());\r
+ Hibernate.initialize(collection.getInstitute());\r
+ fullTextSession.index(collection);\r
+ }\r
+ fullTextSession.flushToIndexes();\r
}\r
}
import org.springframework.beans.factory.annotation.Autowired;\r
import org.springframework.stereotype.Repository;\r
\r
+import eu.etaxonomy.cdm.model.common.CdmBase;\r
import eu.etaxonomy.cdm.model.description.DescriptionBase;\r
import eu.etaxonomy.cdm.model.description.IndividualsAssociation;\r
import eu.etaxonomy.cdm.model.media.Media;\r
public void rebuildIndex() {\r
FullTextSession fullTextSession = Search.getFullTextSession(getSession());\r
\r
- for(SpecimenOrObservationBase occurrence : list(null,null)) { // re-index all taxon base\r
+ for(SpecimenOrObservationBase<?> occurrence : list(null,null)) { // re-index all taxon base\r
\r
- for(DeterminationEvent determination : (Set<DeterminationEvent>)occurrence.getDeterminations()) {\r
+ for(DeterminationEvent determination : occurrence.getDeterminations()) {\r
Hibernate.initialize(determination.getActor());\r
Hibernate.initialize(determination.getTaxon());\r
}\r
Hibernate.initialize(derivedUnit.getCollection().getInstitute());\r
}\r
Hibernate.initialize(derivedUnit.getStoredUnder());\r
- SpecimenOrObservationBase original = derivedUnit.getOriginalUnit();\r
+ SpecimenOrObservationBase<?> original = derivedUnit.getOriginalUnit();\r
if(original != null && original.isInstanceOf(FieldUnit.class)) {\r
- FieldUnit fieldUnit = original.deproxy(original, FieldUnit.class);\r
+ FieldUnit fieldUnit = CdmBase.deproxy(original, FieldUnit.class);\r
Hibernate.initialize(fieldUnit.getGatheringEvent());\r
if(fieldUnit.getGatheringEvent() != null) {\r
Hibernate.initialize(fieldUnit.getGatheringEvent().getActor());\r
\r
addOrder(criteria,orderHints);\r
\r
+ @SuppressWarnings("unchecked")\r
List<SpecimenOrObservationBase> results = criteria.list();\r
defaultBeanInitializer.initializeAll(results, propertyPaths);\r
return results;\r
\r
addOrder(criteria,orderHints);\r
\r
+ @SuppressWarnings("unchecked")\r
List<SpecimenOrObservationBase> results = criteria.list();\r
defaultBeanInitializer.initializeAll(results, propertyPaths);\r
return results;\r
addOrder(criteria,orderHints);\r
}\r
\r
+ @SuppressWarnings("unchecked")\r
List<T> results = criteria.list();\r
defaultBeanInitializer.initializeAll(results, propertyPaths);\r
return results;\r
}\r
- return Collections.EMPTY_LIST;\r
+ return Collections.emptyList();\r
}\r
\r
private <T extends SpecimenOrObservationBase> Criteria createFindOccurrenceCriteria(Class<T> clazz, String queryString,\r
String commonNameSubSelect = subSelects[3];
- logger.debug("taxonSubselect: " + taxonSubselect != null ? taxonSubselect: "NULL");
- logger.debug("synonymSubselect: " + synonymSubselect != null ? synonymSubselect: "NULL");
+ if (logger.isDebugEnabled()) {
+ logger.debug("taxonSubselect: " + (taxonSubselect != null ? taxonSubselect: "NULL"));
+ }
+ if (logger.isDebugEnabled()) {
+ logger.debug("synonymSubselect: " + (synonymSubselect != null ? synonymSubselect: "NULL"));
+ }
Query subTaxon = null;
Query subSynonym = null;
subTaxon.setParameter("classification", classification);
}
-
-
}
if(doSynonyms){
}
if(classification != null){
subCommonNames.setParameter("classification", classification);
-
}
-
-
}
List<Integer> taxa = new ArrayList<Integer>();
/**\r
* Copyright (C) 2008 EDIT\r
-* European Distributed Institute of Taxonomy \r
+* European Distributed Institute of Taxonomy\r
* http://www.e-taxonomy.eu\r
*/\r
\r
* @created 08.09.2008\r
*/\r
public interface IMediaDao extends IIdentifiableDao<Media> {\r
- \r
+\r
/**\r
* Return a count of MediaKeys, optionally filtered by the parameters passed.\r
- * \r
+ *\r
* @param taxonomicScope a Set of Taxon instances that define the taxonomic scope of the key (can be null)\r
* @param geoScopes a Set of NamedArea instances that define the geospatial scope of the key (can be null)\r
* @return a count of MediaKey instances\r
*/\r
public int countMediaKeys(Set<Taxon> taxonomicScope, Set<NamedArea> geoScopes);\r
- \r
+\r
/**\r
- * Return a List of MediaKeys, optionally filtered by the parameters passed. The MediaKey \r
+ * Return a List of MediaKeys, optionally filtered by the parameters passed. The MediaKey\r
* instances have the following properties initialized:\r
- * \r
+ *\r
* MediaKey.title\r
- * \r
+ *\r
* @param taxonomicScope a Set of Taxon instances that define the taxonomic scope of the key (can be null)\r
* @param geoScopes a Set of NamedArea instances that define the geospatial scope of the key (can be null)\r
* @param pageSize The maximum number of keys returned (can be null for all keys)\r
* @return a List of MediaKey instances\r
*/\r
public List<MediaKey> getMediaKeys(Set<Taxon> taxonomicScope, Set<NamedArea> geoScopes, Integer pageSize, Integer pageNumber, List<String> propertyPaths);\r
- \r
+\r
/**\r
* Return a count of the rights for this media entity\r
- * \r
+ *\r
* @param t The media entity\r
* @return a count of Rights instances\r
*/\r
+ @Override\r
public int countRights(Media t);\r
- \r
+\r
/**\r
* Return a List of the rights for this media entity\r
- * \r
+ *\r
* @param t The media entity\r
* @param pageSize The maximum number of rights returned (can be null for all rights)\r
* @param pageNumber The offset (in pageSize chunks) from the start of the result set (0 - based)\r
* @param propertyPaths properties to initialize - see {@link IBeanInitializer#initialize(Object, List)}\r
* @return a List of Rights instances\r
*/\r
- public List<Rights> getRights(Media t, Integer pageSize, Integer pageNumber, List<String> propertyPaths);\r
+ @Override\r
+ public List<Rights> getRights(Media t, Integer pageSize, Integer pageNumber, List<String> propertyPaths);\r
\r
}\r
return referencedEntityDao.saveAll(referencedEntityCollection);
}
- /**
- * TODO candidate for harmonization
- * new name getNames
- */
- public List<TaxonNameBase> getAllNames(int limit, int start){
- return dao.list(limit, start);
- }
-
/**
* TODO candidate for harmonization
* new name getNomenclaturalStatus