1
|
// $Id$
|
2
|
/**
|
3
|
* Copyright (C) 2007 EDIT
|
4
|
* European Distributed Institute of Taxonomy
|
5
|
* http://www.e-taxonomy.eu
|
6
|
*
|
7
|
* The contents of this file are subject to the Mozilla Public License Version 1.1
|
8
|
* See LICENSE.TXT at the top of this package for the full license terms.
|
9
|
*/
|
10
|
|
11
|
package eu.etaxonomy.cdm.api.service;
|
12
|
|
13
|
import java.util.ArrayList;
|
14
|
import java.util.Collection;
|
15
|
import java.util.Collections;
|
16
|
import java.util.Comparator;
|
17
|
import java.util.HashMap;
|
18
|
import java.util.List;
|
19
|
import java.util.Map;
|
20
|
import java.util.TreeMap;
|
21
|
import java.util.UUID;
|
22
|
|
23
|
import org.apache.commons.collections.CollectionUtils;
|
24
|
import org.apache.log4j.Logger;
|
25
|
import org.springframework.beans.factory.annotation.Autowired;
|
26
|
import org.springframework.stereotype.Service;
|
27
|
import org.springframework.transaction.annotation.Transactional;
|
28
|
|
29
|
import eu.etaxonomy.cdm.api.service.config.CreateHierarchyForClassificationConfigurator;
|
30
|
import eu.etaxonomy.cdm.api.service.config.NodeDeletionConfigurator.ChildHandling;
|
31
|
import eu.etaxonomy.cdm.api.service.config.TaxonDeletionConfigurator;
|
32
|
import eu.etaxonomy.cdm.api.service.dto.GroupedTaxonDTO;
|
33
|
import eu.etaxonomy.cdm.api.service.pager.Pager;
|
34
|
import eu.etaxonomy.cdm.api.service.pager.PagerUtils;
|
35
|
import eu.etaxonomy.cdm.api.service.pager.impl.AbstractPagerImpl;
|
36
|
import eu.etaxonomy.cdm.api.service.pager.impl.DefaultPagerImpl;
|
37
|
import eu.etaxonomy.cdm.common.monitor.IProgressMonitor;
|
38
|
import eu.etaxonomy.cdm.model.common.CdmBase;
|
39
|
import eu.etaxonomy.cdm.model.common.ITreeNode;
|
40
|
import eu.etaxonomy.cdm.model.description.DescriptionElementBase;
|
41
|
import eu.etaxonomy.cdm.model.description.TaxonDescription;
|
42
|
import eu.etaxonomy.cdm.model.media.Media;
|
43
|
import eu.etaxonomy.cdm.model.media.MediaRepresentation;
|
44
|
import eu.etaxonomy.cdm.model.media.MediaUtils;
|
45
|
import eu.etaxonomy.cdm.model.name.NonViralName;
|
46
|
import eu.etaxonomy.cdm.model.name.Rank;
|
47
|
import eu.etaxonomy.cdm.model.name.TaxonNameBase;
|
48
|
import eu.etaxonomy.cdm.model.taxon.Classification;
|
49
|
import eu.etaxonomy.cdm.model.taxon.ITaxonNodeComparator;
|
50
|
import eu.etaxonomy.cdm.model.taxon.ITaxonTreeNode;
|
51
|
import eu.etaxonomy.cdm.model.taxon.Taxon;
|
52
|
import eu.etaxonomy.cdm.model.taxon.TaxonNode;
|
53
|
import eu.etaxonomy.cdm.persistence.dao.initializer.IBeanInitializer;
|
54
|
import eu.etaxonomy.cdm.persistence.dao.taxon.IClassificationDao;
|
55
|
import eu.etaxonomy.cdm.persistence.dao.taxon.ITaxonDao;
|
56
|
import eu.etaxonomy.cdm.persistence.dao.taxon.ITaxonNodeDao;
|
57
|
import eu.etaxonomy.cdm.persistence.dto.ClassificationLookupDTO;
|
58
|
import eu.etaxonomy.cdm.persistence.dto.UuidAndTitleCache;
|
59
|
import eu.etaxonomy.cdm.persistence.query.OrderHint;
|
60
|
import eu.etaxonomy.cdm.strategy.cache.common.IIdentifiableEntityCacheStrategy;
|
61
|
import eu.etaxonomy.cdm.strategy.parser.NonViralNameParserImpl;
|
62
|
|
63
|
/**
|
64
|
* @author n.hoffmann
|
65
|
* @created Sep 21, 2009
|
66
|
*/
|
67
|
@Service
|
68
|
@Transactional(readOnly = true)
|
69
|
public class ClassificationServiceImpl extends IdentifiableServiceBase<Classification, IClassificationDao>
|
70
|
implements IClassificationService {
|
71
|
    private static final Logger logger = Logger.getLogger(ClassificationServiceImpl.class);

    // DAO for taxon nodes; used for all node-level CRUD operations
    @Autowired
    private ITaxonNodeDao taxonNodeDao;

    // DAO for taxa; used to (re-)load taxa into the current session
    @Autowired
    private ITaxonDao taxonDao;

    // initializes bean property paths on entities loaded by this service
    @Autowired
    private IBeanInitializer defaultBeanInitializer;

    /**
     * Injects the classification DAO used by the {@link IdentifiableServiceBase} base class.
     *
     * @param dao the classification DAO
     */
    @Override
    @Autowired
    protected void setDao(IClassificationDao dao) {
        this.dao = dao;
    }

    // comparator used throughout this service to sort sibling taxon nodes for presentation
    private Comparator<? super TaxonNode> taxonNodeComparator;

    /**
     * Injects the taxon node comparator.
     * NOTE(review): the cast assumes the injected ITaxonNodeComparator implementation
     * also implements java.util.Comparator — confirm with the registered bean.
     */
    @Autowired
    public void setTaxonNodeComparator(ITaxonNodeComparator<? super TaxonNode> taxonNodeComparator){
        this.taxonNodeComparator = (Comparator<? super TaxonNode>) taxonNodeComparator;
    }
|
94
|
|
95
|
@Override
|
96
|
public TaxonNode loadTaxonNodeByTaxon(Taxon taxon, UUID classificationUuid, List<String> propertyPaths){
|
97
|
Classification tree = dao.load(classificationUuid);
|
98
|
TaxonNode node = tree.getNode(taxon);
|
99
|
|
100
|
return loadTaxonNode(node.getUuid(), propertyPaths);
|
101
|
}
|
102
|
|
103
|
    /**
     * Reloads the given taxon node (resolved by its UUID) with the given property
     * paths initialized.
     *
     * @deprecated use {@link #loadTaxonNode(UUID, List)} instead
     */
    @Override
    @Deprecated // use loadTaxonNode(UUID, List<String>) instead
    public TaxonNode loadTaxonNode(TaxonNode taxonNode, List<String> propertyPaths){
        return taxonNodeDao.load(taxonNode.getUuid(), propertyPaths);
    }

    /**
     * Loads the taxon node with the given UUID, initializing the given property paths.
     */
    public TaxonNode loadTaxonNode(UUID taxonNodeUuid, List<String> propertyPaths){
        return taxonNodeDao.load(taxonNodeUuid, propertyPaths);
    }
|
112
|
|
113
|
@Override
|
114
|
public List<TaxonNode> listRankSpecificRootNodes(Classification classification, Rank rank, Integer pageSize,
|
115
|
Integer pageIndex, List<String> propertyPaths) {
|
116
|
return pageRankSpecificRootNodes(classification, rank, pageSize, pageIndex, propertyPaths).getRecords();
|
117
|
}
|
118
|
|
119
|
@Override
|
120
|
public Pager<TaxonNode> pageRankSpecificRootNodes(Classification classification, Rank rank, Integer pageSize,
|
121
|
Integer pageIndex, List<String> propertyPaths) {
|
122
|
long[] numberOfResults = dao.countRankSpecificRootNodes(classification, rank);
|
123
|
long totalNumberOfResults = numberOfResults[0] + (numberOfResults.length > 1 ? numberOfResults[1] : 0);
|
124
|
|
125
|
List<TaxonNode> results = new ArrayList<TaxonNode>();
|
126
|
|
127
|
if (AbstractPagerImpl.hasResultsInRange(totalNumberOfResults, pageIndex, pageSize)) { // no point checking again
|
128
|
Integer limit = PagerUtils.limitFor(pageSize);
|
129
|
Integer start = PagerUtils.startFor(pageSize, pageIndex);
|
130
|
|
131
|
Integer remainingLimit = limit;
|
132
|
int[] queryIndexes = rank == null ? new int[]{0} : new int[]{0,1};
|
133
|
|
134
|
for(int queryIndex: queryIndexes) {
|
135
|
if(start != null && start > numberOfResults[queryIndex]) {
|
136
|
// start in next query with new start value
|
137
|
start = start - (int)numberOfResults[queryIndex];
|
138
|
continue;
|
139
|
}
|
140
|
|
141
|
List<TaxonNode> perQueryResults = dao.listRankSpecificRootNodes(classification, rank, remainingLimit, start, propertyPaths, queryIndex);
|
142
|
results.addAll(perQueryResults);
|
143
|
if(remainingLimit != null ){
|
144
|
remainingLimit = remainingLimit - results.size();
|
145
|
if(remainingLimit <= 0) {
|
146
|
// no need to run further queries if first query returned enough items!
|
147
|
break;
|
148
|
}
|
149
|
// start at with fist item of next query to fetch the remaining items
|
150
|
start = 0;
|
151
|
}
|
152
|
}
|
153
|
}
|
154
|
// long start_t = System.currentTimeMillis();
|
155
|
Collections.sort(results, taxonNodeComparator); // TODO is ordering during the hibernate query in the dao possible?
|
156
|
// System.err.println("service.pageRankSpecificRootNodes() - Collections.sort(results, taxonNodeComparator) " + (System.currentTimeMillis() - start_t));
|
157
|
return new DefaultPagerImpl<TaxonNode>(pageIndex, (int) totalNumberOfResults, pageSize, results);
|
158
|
|
159
|
}
|
160
|
|
161
|
/**
|
162
|
* @implements {@link IClassificationService#loadTreeBranch(TaxonNode, Rank, List)
|
163
|
* @see eu.etaxonomy.cdm.api.service.ITaxonService#loadTreeBranchTo(eu.etaxonomy.cdm.model.taxon.TaxonNode, eu.etaxonomy.cdm.model.name.Rank, java.util.List)
|
164
|
* FIXME Candidate for harmonization
|
165
|
* move to classification service
|
166
|
*/
|
167
|
@Override
|
168
|
public List<TaxonNode> loadTreeBranch(TaxonNode taxonNode, Rank baseRank, List<String> propertyPaths){
|
169
|
|
170
|
TaxonNode thisNode = taxonNodeDao.load(taxonNode.getUuid(), propertyPaths);
|
171
|
List<TaxonNode> pathToRoot = new ArrayList<TaxonNode>();
|
172
|
pathToRoot.add(thisNode);
|
173
|
|
174
|
while(!thisNode.isTopmostNode()){
|
175
|
//TODO why do we need to deproxy here?
|
176
|
// without this thisNode.getParent() will return NULL in
|
177
|
// some cases (environment dependend?) even if the parent exits
|
178
|
TaxonNode parentNode = CdmBase.deproxy(thisNode, TaxonNode.class).getParent();
|
179
|
|
180
|
if(parentNode == null){
|
181
|
throw new NullPointerException("taxonNode " + thisNode + " must have a parent since it is not top most");
|
182
|
}
|
183
|
if(parentNode.getTaxon() == null){
|
184
|
throw new NullPointerException("The taxon associated with taxonNode " + parentNode + " is NULL");
|
185
|
}
|
186
|
if(parentNode.getTaxon().getName() == null){
|
187
|
throw new NullPointerException("The name of the taxon associated with taxonNode " + parentNode + " is NULL");
|
188
|
}
|
189
|
|
190
|
Rank parentNodeRank = parentNode.getTaxon().getName() == null ? null : parentNode.getTaxon().getName().getRank();
|
191
|
// stop if the next parent is higher than the baseRank
|
192
|
if(baseRank != null && parentNodeRank != null && baseRank.isLower(parentNodeRank)){
|
193
|
break;
|
194
|
}
|
195
|
|
196
|
pathToRoot.add(parentNode);
|
197
|
thisNode = parentNode;
|
198
|
}
|
199
|
|
200
|
// initialize and invert order of nodes in list
|
201
|
defaultBeanInitializer.initializeAll(pathToRoot, propertyPaths);
|
202
|
Collections.reverse(pathToRoot);
|
203
|
|
204
|
return pathToRoot;
|
205
|
}
|
206
|
|
207
|
    /**
     * Same as {@link #loadTreeBranch(TaxonNode, Rank, List)} but starting from a
     * taxon: the taxon's node within the given classification is resolved first.
     *
     * @return the branch, or <code>null</code> when the taxon has no node in the
     *         given classification
     */
    @Override
    public List<TaxonNode> loadTreeBranchToTaxon(Taxon taxon, Classification classification, Rank baseRank, List<String> propertyPaths){
        Classification tree = dao.load(classification.getUuid());
        // reload the taxon so it is attached to the current session
        taxon = (Taxon) taxonDao.load(taxon.getUuid());
        TaxonNode node = tree.getNode(taxon);
        if(node == null){
            logger.warn("The specified taxon is not found in the given tree.");
            return null;
        }
        return loadTreeBranch(node, baseRank, propertyPaths);
    }
|
218
|
|
219
|
|
220
|
    /**
     * Loads the direct children of the given node, initializes the given property
     * paths on them and sorts them with the injected taxon node comparator.
     */
    @Override
    public List<TaxonNode> loadChildNodesOfTaxonNode(TaxonNode taxonNode,
            List<String> propertyPaths) {
        // reload the node to get a session-attached child collection
        taxonNode = taxonNodeDao.load(taxonNode.getUuid());
        List<TaxonNode> childNodes = new ArrayList<TaxonNode>(taxonNode.getChildNodes());
        defaultBeanInitializer.initializeAll(childNodes, propertyPaths);
        Collections.sort(childNodes, taxonNodeComparator);
        return childNodes;
    }
|
229
|
|
230
|
@Override
|
231
|
public List<TaxonNode> listChildNodesOfTaxon(UUID taxonUuid, UUID classificationUuid, Integer pageSize,
|
232
|
Integer pageIndex, List<String> propertyPaths){
|
233
|
|
234
|
Classification classification = dao.load(classificationUuid);
|
235
|
Taxon taxon = (Taxon) taxonDao.load(taxonUuid);
|
236
|
|
237
|
List<TaxonNode> results = dao.listChildrenOf(taxon, classification, pageSize, pageIndex, propertyPaths);
|
238
|
Collections.sort(results, taxonNodeComparator); // FIXME this is only a HACK, order during the hibernate query in the dao
|
239
|
return results;
|
240
|
}
|
241
|
|
242
|
@Override
|
243
|
public Pager<TaxonNode> pageSiblingsOfTaxon(UUID taxonUuid, UUID classificationUuid, Integer pageSize,
|
244
|
Integer pageIndex, List<String> propertyPaths){
|
245
|
|
246
|
Classification classification = dao.load(classificationUuid);
|
247
|
Taxon taxon = (Taxon) taxonDao.load(taxonUuid);
|
248
|
|
249
|
long numberOfResults = dao.countSiblingsOf(taxon, classification);
|
250
|
|
251
|
List<TaxonNode> results;
|
252
|
if(PagerUtils.hasResultsInRange(numberOfResults, pageIndex, pageSize)) {
|
253
|
results = dao.listSiblingsOf(taxon, classification, pageSize, pageIndex, propertyPaths);
|
254
|
Collections.sort(results, taxonNodeComparator); // FIXME this is only a HACK, order during the hibernate query in the dao
|
255
|
} else {
|
256
|
results = new ArrayList<>();
|
257
|
}
|
258
|
|
259
|
return new DefaultPagerImpl<TaxonNode>(pageIndex, numberOfResults, pageSize, results);
|
260
|
}
|
261
|
|
262
|
@Override
|
263
|
public List<TaxonNode> listSiblingsOfTaxon(UUID taxonUuid, UUID classificationUuid, Integer pageSize,
|
264
|
Integer pageIndex, List<String> propertyPaths){
|
265
|
|
266
|
Pager<TaxonNode> pager = pageSiblingsOfTaxon(taxonUuid, classificationUuid, pageSize, pageIndex, propertyPaths);
|
267
|
return pager.getRecords();
|
268
|
}
|
269
|
|
270
|
    /**
     * Returns the taxon node with the given UUID, or <code>null</code> if none exists.
     */
    @Override
    public TaxonNode getTaxonNodeByUuid(UUID uuid) {
        return taxonNodeDao.findByUuid(uuid);
    }
|
274
|
|
275
|
@Override
|
276
|
public ITaxonTreeNode getTreeNodeByUuid(UUID uuid){
|
277
|
ITaxonTreeNode treeNode = taxonNodeDao.findByUuid(uuid);
|
278
|
if(treeNode == null){
|
279
|
treeNode = dao.findByUuid(uuid);
|
280
|
}
|
281
|
|
282
|
return treeNode;
|
283
|
}
|
284
|
|
285
|
    /**
     * Lists classifications with optional paging, ordering and property-path
     * initialization, delegating directly to the DAO.
     */
    @Override
    public List<Classification> listClassifications(Integer limit, Integer start, List<OrderHint> orderHints, List<String> propertyPaths) {
        return dao.list(limit, start, orderHints, propertyPaths);
    }
|
289
|
|
290
|
    /**
     * Deletes the given taxon node.
     *
     * @return the UUID of the deleted node
     */
    @Override
    public UUID removeTaxonNode(TaxonNode taxonNode) {
        return taxonNodeDao.delete(taxonNode);
    }
|
294
|
@Override
|
295
|
public UUID removeTreeNode(ITaxonTreeNode treeNode) {
|
296
|
if(treeNode instanceof Classification){
|
297
|
return dao.delete((Classification) treeNode);
|
298
|
}else if(treeNode instanceof TaxonNode){
|
299
|
return taxonNodeDao.delete((TaxonNode)treeNode);
|
300
|
}
|
301
|
return null;
|
302
|
}
|
303
|
    /**
     * Persists the given taxon node.
     *
     * @return the UUID of the saved node
     */
    @Override
    public UUID saveTaxonNode(TaxonNode taxonNode) {
        return taxonNodeDao.save(taxonNode).getUuid();
    }
|
307
|
|
308
|
    /**
     * Persists all taxon nodes of the given collection.
     *
     * @return map from UUID to the saved node, as returned by the DAO
     */
    @Override
    public Map<UUID, TaxonNode> saveTaxonNodeAll(
            Collection<TaxonNode> taxonNodeCollection) {
        return taxonNodeDao.saveAll(taxonNodeCollection);
    }
|
313
|
|
314
|
@Override
|
315
|
public UUID saveTreeNode(ITaxonTreeNode treeNode) {
|
316
|
if(treeNode instanceof Classification){
|
317
|
return dao.save((Classification) treeNode).getUuid();
|
318
|
}else if(treeNode instanceof TaxonNode){
|
319
|
return taxonNodeDao.save((TaxonNode)treeNode).getUuid();
|
320
|
}
|
321
|
return null;
|
322
|
}
|
323
|
|
324
|
    /**
     * Returns all taxon nodes of all classifications (unpaged: limit and start
     * are both null).
     */
    @Override
    public List<TaxonNode> getAllNodes(){
        return taxonNodeDao.list(null,null);
    }
|
328
|
|
329
|
    /**
     * Returns UUID/titleCache pairs for the nodes of accepted taxa in the
     * classification identified by the given UUID, optionally filtered by a
     * title pattern and limited in size.
     */
    @Override
    public List<UuidAndTitleCache<TaxonNode>> getTaxonNodeUuidAndTitleCacheOfAcceptedTaxaByClassification(UUID classificationUuid, List<UUID> excludeTaxa, Integer limit, String pattern) {
        return taxonDao.getTaxonNodeUuidAndTitleCacheOfAcceptedTaxaByClassification(dao.load(classificationUuid), excludeTaxa, limit, pattern);
    }

    /**
     * Same as above, for an already loaded classification.
     */
    @Override
    public List<UuidAndTitleCache<TaxonNode>> getTaxonNodeUuidAndTitleCacheOfAcceptedTaxaByClassification(Classification classification, List<UUID> excludeTaxa, Integer limit, String pattern) {
        return taxonDao.getTaxonNodeUuidAndTitleCacheOfAcceptedTaxaByClassification(classification, excludeTaxa, limit, pattern);
    }

    /**
     * Convenience overload without limit and pattern (classification by UUID).
     */
    @Override
    public List<UuidAndTitleCache<TaxonNode>> getTaxonNodeUuidAndTitleCacheOfAcceptedTaxaByClassification(UUID classificationUuid, List<UUID> excludeTaxa) {
        return taxonDao.getTaxonNodeUuidAndTitleCacheOfAcceptedTaxaByClassification(dao.load(classificationUuid), excludeTaxa, null, null);
    }

    /**
     * Convenience overload without limit and pattern (loaded classification).
     */
    @Override
    public List<UuidAndTitleCache<TaxonNode>> getTaxonNodeUuidAndTitleCacheOfAcceptedTaxaByClassification(Classification classification, List<UUID> excludeTaxa) {
        return taxonDao.getTaxonNodeUuidAndTitleCacheOfAcceptedTaxaByClassification(classification, excludeTaxa, null, null);
    }
|
348
|
|
349
|
    /**
     * Returns UUID/titleCache pairs of classifications matching the given
     * pattern, limited to <code>limit</code> entries.
     */
    @Override
    public List<UuidAndTitleCache<Classification>> getUuidAndTitleCache(Integer limit, String pattern) {
        return dao.getUuidAndTitleCache(limit, pattern);
    }
|
353
|
|
354
|
@Override
|
355
|
public Map<UUID, List<MediaRepresentation>> getAllMediaForChildNodes(
|
356
|
TaxonNode taxonNode, List<String> propertyPaths, int size,
|
357
|
int height, int widthOrDuration, String[] mimeTypes) {
|
358
|
|
359
|
TreeMap<UUID, List<MediaRepresentation>> result = new TreeMap<UUID, List<MediaRepresentation>>();
|
360
|
List<Media> taxonMedia = new ArrayList<Media>();
|
361
|
List<MediaRepresentation> mediaRepresentations = new ArrayList<MediaRepresentation>();
|
362
|
|
363
|
//add all media of the children to the result map
|
364
|
if (taxonNode != null){
|
365
|
|
366
|
List<TaxonNode> nodes = new ArrayList<TaxonNode>();
|
367
|
|
368
|
nodes.add(loadTaxonNode(taxonNode, propertyPaths));
|
369
|
nodes.addAll(loadChildNodesOfTaxonNode(taxonNode, propertyPaths));
|
370
|
|
371
|
if (nodes != null){
|
372
|
for(TaxonNode node : nodes){
|
373
|
Taxon taxon = node.getTaxon();
|
374
|
for (TaxonDescription taxonDescription: taxon.getDescriptions()){
|
375
|
for (DescriptionElementBase descriptionElement: taxonDescription.getElements()){
|
376
|
for(Media media : descriptionElement.getMedia()){
|
377
|
taxonMedia.add(media);
|
378
|
|
379
|
//find the best matching representation
|
380
|
mediaRepresentations.add(MediaUtils.findBestMatchingRepresentation(media,null, size, height, widthOrDuration, mimeTypes));
|
381
|
|
382
|
}
|
383
|
}
|
384
|
}
|
385
|
result.put(taxon.getUuid(), mediaRepresentations);
|
386
|
|
387
|
}
|
388
|
}
|
389
|
|
390
|
}
|
391
|
|
392
|
return result;
|
393
|
|
394
|
}
|
395
|
|
396
|
    /**
     * Taxon-based variant of
     * {@link #getAllMediaForChildNodes(TaxonNode, List, int, int, int, String[])}:
     * resolves the taxon's node within the given classification first.
     * If the taxon has no node in the tree, the delegate receives null and
     * returns an empty map.
     */
    @Override
    public Map<UUID, List<MediaRepresentation>> getAllMediaForChildNodes(Taxon taxon, Classification taxTree, List<String> propertyPaths, int size, int height, int widthOrDuration, String[] mimeTypes){
        TaxonNode node = taxTree.getNode(taxon);

        return getAllMediaForChildNodes(node, propertyPaths, size, height, widthOrDuration, mimeTypes);
    }
|
402
|
|
403
|
@Override
|
404
|
@Transactional(readOnly = false)
|
405
|
public void updateTitleCache(Class<? extends Classification> clazz, Integer stepSize, IIdentifiableEntityCacheStrategy<Classification> cacheStrategy, IProgressMonitor monitor) {
|
406
|
if (clazz == null){
|
407
|
clazz = Classification.class;
|
408
|
}
|
409
|
super.updateTitleCacheImpl(clazz, stepSize, cacheStrategy, monitor);
|
410
|
}
|
411
|
|
412
|
/**
|
413
|
*
|
414
|
* @param allNodesOfClassification
|
415
|
* @return null - if allNodesOfClassification is empty <br>
|
416
|
*/
|
417
|
|
418
|
private Map<String, List<TaxonNode>> getSortedGenusList(Collection<TaxonNode> allNodesOfClassification){
|
419
|
|
420
|
if(allNodesOfClassification == null || allNodesOfClassification.isEmpty()){
|
421
|
return null;
|
422
|
}
|
423
|
Map<String, List<TaxonNode>> sortedGenusMap = new HashMap<String, List<TaxonNode>>();
|
424
|
for(TaxonNode node:allNodesOfClassification){
|
425
|
final TaxonNode tn = node;
|
426
|
Taxon taxon = node.getTaxon();
|
427
|
NonViralName name = CdmBase.deproxy(taxon.getName(), NonViralName.class);
|
428
|
String genusOrUninomial = name.getGenusOrUninomial();
|
429
|
//if rank unknown split string and take first word
|
430
|
if(genusOrUninomial == null){
|
431
|
String titleCache = taxon.getTitleCache();
|
432
|
String[] split = titleCache.split("\\s+");
|
433
|
for(String s:split){
|
434
|
genusOrUninomial = s;
|
435
|
break;
|
436
|
}
|
437
|
}
|
438
|
//if node has children
|
439
|
|
440
|
//retrieve list from map if not create List
|
441
|
if(sortedGenusMap.containsKey(genusOrUninomial)){
|
442
|
List<TaxonNode> list = sortedGenusMap.get(genusOrUninomial);
|
443
|
list.add(node);
|
444
|
sortedGenusMap.put(genusOrUninomial, list);
|
445
|
}else{
|
446
|
//create List for genus
|
447
|
List<TaxonNode> list = new ArrayList<TaxonNode>();
|
448
|
list.add(node);
|
449
|
sortedGenusMap.put(genusOrUninomial, list);
|
450
|
}
|
451
|
}
|
452
|
return sortedGenusMap;
|
453
|
}
|
454
|
|
455
|
    /**
     * Creates a new Classification with parent TaxonNodes inserted at genus level.
     * <p>
     * The nodes of the given classification are grouped by genus; for each group
     * a parent node is either taken from the group (a node whose name cache
     * equals the genus) or newly created, and the remaining topmost nodes of the
     * group are copied (cloned, with their child hierarchies) below it.
     *
     * @param classification the classification whose hierarchy shall be improved (will not be modified)
     * @param configurator to change certain settings, if null then standard settings will be taken.
     *        NOTE(review): the configurator is currently not evaluated anywhere in this method.
     * @return an UpdateResult whose CDM entity is the new classification with
     *         parent nodes for each genus group
     */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    @Transactional(readOnly = false)
    @Override
    public UpdateResult createHierarchyInClassification(Classification classification, CreateHierarchyForClassificationConfigurator configurator){
        UpdateResult result = new UpdateResult();
        // reload to get a session-attached instance
        classification = dao.findByUuid(classification.getUuid());
        Map<String, List<TaxonNode>> map = getSortedGenusList(classification.getAllNodes());

        final String APPENDIX = "repaired";
        String titleCache = org.apache.commons.lang.StringUtils.isBlank(classification.getTitleCache()) ? " " : classification.getTitleCache() ;
        //TODO classification clone???
        Classification newClassification = Classification.NewInstance(titleCache +" "+ APPENDIX);
        newClassification.setReference(classification.getReference());

        for(Map.Entry<String, List<TaxonNode>> entry:map.entrySet()){
            String genus = entry.getKey();
            List<TaxonNode> listOfTaxonNodes = entry.getValue();
            TaxonNode parentNode = null;
            //Search for genus in list
            for(TaxonNode tNode:listOfTaxonNodes){
                //take that taxonNode as parent and remove from list with all it possible children
                //FIXME NPE for name
                TaxonNameBase name = tNode.getTaxon().getName();
                NonViralName nonViralName = CdmBase.deproxy(name, NonViralName.class);
                if(nonViralName.getNameCache().equalsIgnoreCase(genus)){
                    TaxonNode clone = (TaxonNode) tNode.clone();
                    if(!tNode.hasChildNodes()){
                        //FIXME remove classification
                        // parentNode = newClassification.addChildNode(clone, 0, classification.getCitation(), classification.getMicroReference());
                        parentNode = newClassification.addChildNode(clone, 0, clone.getReference(), clone.getMicroReference());
                        //remove taxonNode from list because just added to classification
                        // NOTE(review): removing from the list that is currently being
                        // iterated is only safe here because the loop breaks immediately
                        result.addUpdatedObject(tNode);
                        listOfTaxonNodes.remove(tNode);
                    }else{
                        //get all childNodes
                        //save prior Hierarchy and remove them from the list
                        List<TaxonNode> copyAllChildrenToTaxonNode = copyAllChildrenToTaxonNode(tNode, clone, result);
                        // parentNode = newClassification.addChildNode(clone, 0, classification.getCitation(), classification.getMicroReference());
                        //FIXME remove classification
                        parentNode = newClassification.addChildNode(clone, 0, clone.getReference(), clone.getMicroReference());
                        //remove taxonNode from list because just added to classification
                        result.addUpdatedObject(tNode);
                        listOfTaxonNodes.remove(tNode);
                        if(copyAllChildrenToTaxonNode != null){
                            listOfTaxonNodes = (List<TaxonNode>) CollectionUtils.removeAll(listOfTaxonNodes, copyAllChildrenToTaxonNode);
                        }
                    }
                    break;
                }
            }
            if(parentNode == null){
                //if no match found in list, create parentNode
                NonViralNameParserImpl parser = NonViralNameParserImpl.NewInstance();
                NonViralName nonViralName = parser.parseFullName(genus);
                TaxonNameBase taxonNameBase = nonViralName;
                //TODO Sec via configurator
                Taxon taxon = Taxon.NewInstance(taxonNameBase, null);
                parentNode = newClassification.addChildTaxon(taxon, 0, null, null);
                result.addUpdatedObject(parentNode);
            }
            //iterate over the rest of the list
            for(TaxonNode tn : listOfTaxonNodes){
                //if TaxonNode has a parent and this is not the classification then skip it
                //and add to new classification via the parentNode as children of it
                //this should assures to keep the already existing hierarchy
                //FIXME: Assert is not rootnode --> entrypoint is not classification in future but rather rootNode

                if(!tn.isTopmostNode()){
                    continue; //skip to next taxonNode
                }

                TaxonNode clone = (TaxonNode) tn.clone();
                //FIXME: citation from node
                //TODO: addchildNode without citation and references
                // TaxonNode taxonNode = parentNode.addChildNode(clone, classification.getCitation(), classification.getMicroReference());
                TaxonNode taxonNode = parentNode.addChildNode(clone, clone.getReference(), clone.getMicroReference());
                result.addUnChangedObject(clone);
                if(tn.hasChildNodes()){
                    //save hierarchy in new classification
                    List<TaxonNode> copyAllChildrenToTaxonNode = copyAllChildrenToTaxonNode(tn, taxonNode, result);
                    if(copyAllChildrenToTaxonNode != null){
                        // NOTE(review): reassigning listOfTaxonNodes does NOT affect the
                        // enhanced-for iteration already in progress over the old list;
                        // presumably intended as a bookkeeping step — verify.
                        listOfTaxonNodes = (List<TaxonNode>) CollectionUtils.removeAll(listOfTaxonNodes, copyAllChildrenToTaxonNode);
                    }
                }
            }
        }
        dao.saveOrUpdate(newClassification);
        result.setCdmEntity(newClassification);
        return result;
    }
|
555
|
|
556
|
    /**
     * Recursive helper: clones all child nodes of <code>copyFromNode</code> (and
     * their complete subtrees) and adds the clones below <code>copyToNode</code>,
     * using each clone's own reference and micro reference.
     *
     * @param copyFromNode TaxonNode with children
     * @param copyToNode   TaxonNode which will receive the cloned children
     * @param result       collects every created clone as an unchanged object
     * @return the LIVE child-node list of copyFromNode (callers use it to remove
     *         already-copied nodes); null if copyFromNode has no children
     */
    private List<TaxonNode> copyAllChildrenToTaxonNode(TaxonNode copyFromNode, TaxonNode copyToNode, UpdateResult result) {
        List<TaxonNode> childNodes;
        if(!copyFromNode.hasChildNodes()){
            return null;
        }else{
            childNodes = copyFromNode.getChildNodes();
        }
        for(TaxonNode childNode:childNodes){
            TaxonNode clone = (TaxonNode) childNode.clone();
            result.addUnChangedObject(clone);
            // depth-first: copy the grandchildren below the freshly created clone
            if(childNode.hasChildNodes()){
                copyAllChildrenToTaxonNode(childNode, clone, result);
            }
            //FIXME: citation from node instead of classification
            // copyToNode.addChildNode(clone,classification.getCitation(), classification.getMicroReference());
            copyToNode.addChildNode(clone, clone.getReference(), clone.getMicroReference());
        }
        return childNodes;
    }
|
584
|
|
585
|
    /**
     * {@inheritDoc}
     * <p>
     * Delegates directly to the DAO.
     */
    @Override
    public ClassificationLookupDTO classificationLookup(Classification classification) {
        return dao.classificationLookup(classification);
    }
|
592
|
|
593
|
|
594
|
@Override
|
595
|
public DeleteResult delete(UUID classificationUuid, TaxonDeletionConfigurator config){
|
596
|
DeleteResult result = new DeleteResult();
|
597
|
Classification classification = dao.findByUuid(classificationUuid);
|
598
|
if (classification == null){
|
599
|
result.addException(new IllegalArgumentException("The classification does not exist in database."));
|
600
|
result.setAbort();
|
601
|
return result;
|
602
|
}
|
603
|
if (!classification.hasChildNodes()){
|
604
|
dao.delete(classification);
|
605
|
}
|
606
|
if (config.getTaxonNodeConfig().getChildHandling().equals(ChildHandling.DELETE) ){
|
607
|
TaxonNode root = classification.getRootNode();
|
608
|
taxonNodeDao.delete(root, true);
|
609
|
dao.delete(classification);
|
610
|
}
|
611
|
|
612
|
|
613
|
return result;
|
614
|
}
|
615
|
|
616
|
    /**
     * Groups the given taxa by a higher taxon within the given classification.
     * <p>
     * Works on tree indexes: for each taxon the tree index is looked up, the
     * closure of all ancestor indexes is built, ancestors are filtered by the
     * given rank window, deeper ancestors shadow their own ancestors, and finally
     * each original taxon is assigned the nearest surviving ancestor as group.
     *
     * @param originalTaxonUuids UUIDs of the taxa to group
     * @param classificationUuid classification whose hierarchy is used
     * @param minRank lowest rank of a potential group taxon, may be null
     * @param maxRank highest rank of a potential group taxon, may be null
     * @return one GroupedTaxonDTO per input taxon, in input order; the group
     *         fields stay unset when no matching ancestor exists
     */
    @Override
    public List<GroupedTaxonDTO> groupTaxaByHigherTaxon(List<UUID> originalTaxonUuids, UUID classificationUuid, Rank minRank, Rank maxRank){
        List<GroupedTaxonDTO> result = new ArrayList<>();

        //get treeindex for each taxonUUID
        Map<UUID, String> taxonIdTreeIndexMap = dao.treeIndexForTaxonUuids(classificationUuid, originalTaxonUuids);

        //build treeindex tree or list
        // the closure contains every ancestor index ("#t1#", "#t1#t2#", ...) of any
        // input taxon, excluding the classification-prefix segments themselves
        List<String> treeIndexClosure = new ArrayList<>();
        for (String treeIndex : taxonIdTreeIndexMap.values()){
            String[] splits = treeIndex.substring(1).split(ITreeNode.separator);
            String currentIndex = ITreeNode.separator;
            for (String split : splits){
                if (split.equals("")){
                    continue;
                }
                currentIndex += split + ITreeNode.separator;
                if (!treeIndexClosure.contains(currentIndex) && !split.startsWith(ITreeNode.treePrefix)){
                    treeIndexClosure.add(currentIndex);
                }
            }
        }

        //get rank sortindex for all parent taxa with sortindex <= minRank and sortIndex >= maxRank (if available)
        Integer minRankOrderIndex = minRank == null ? null : minRank.getOrderIndex();
        Integer maxRankOrderIndex = maxRank == null ? null : maxRank.getOrderIndex();
        Map<String, Integer> treeIndexSortIndexMapTmp = taxonNodeDao.rankOrderIndexForTreeIndex(treeIndexClosure, minRankOrderIndex, maxRankOrderIndex);

        //remove all treeindex with "exists child in above map(and child.sortindex > xxx)
        // sort so that a parent index directly precedes its descendants; a surviving
        // descendant then removes (shadows) its parent from the result map
        // NOTE(review): TreeIndexComparator is not defined in this view — presumably a
        // nested/helper class elsewhere in this file; verify its ordering contract.
        List<String> treeIndexList = new ArrayList<>(treeIndexSortIndexMapTmp.keySet());
        Collections.sort(treeIndexList, new TreeIndexComparator());
        Map<String, Integer> treeIndexSortIndexMap = new HashMap<>();
        String lastTreeIndex = null;
        for (String treeIndex : treeIndexList){
            if (lastTreeIndex != null && treeIndex.startsWith(lastTreeIndex)){
                treeIndexSortIndexMap.remove(lastTreeIndex);
            }
            treeIndexSortIndexMap.put(treeIndex, treeIndexSortIndexMapTmp.get(treeIndex));
            lastTreeIndex = treeIndex;
        }

        //get taxonID for treeIndexes
        Map<String, UuidAndTitleCache<?>> treeIndexTaxonIdMap = taxonNodeDao.taxonUuidsForTreeIndexes(treeIndexSortIndexMap.keySet());

        //fill result list
        // walk each taxon's index upwards, one segment at a time, until a group
        // taxon is found or the root is passed
        for (UUID originalTaxonUuid : originalTaxonUuids){
            GroupedTaxonDTO item = new GroupedTaxonDTO();
            result.add(item);
            item.setTaxonUuid(originalTaxonUuid);
            String groupIndex = taxonIdTreeIndexMap.get(originalTaxonUuid);
            while (groupIndex != null){
                if (treeIndexTaxonIdMap.get(groupIndex) != null){
                    UuidAndTitleCache<?> uuidAndLabel = treeIndexTaxonIdMap.get(groupIndex);
                    item.setGroupTaxonUuid(uuidAndLabel.getUuid());
                    item.setGroupTaxonName(uuidAndLabel.getTitleCache());
                    break;
                }else{
                    // strip the last index segment to move to the parent index
                    int index = groupIndex.substring(0, groupIndex.length()-1).lastIndexOf(ITreeNode.separator);
                    groupIndex = index<0 ? null : groupIndex.substring(0, index+1);
                }
            }
        }

        return result;
    }
|
681
|
|
682
|
}
|