1
|
/**
|
2
|
* Copyright (C) 2015 EDIT
|
3
|
* European Distributed Institute of Taxonomy
|
4
|
* http://www.e-taxonomy.eu
|
5
|
*
|
6
|
* The contents of this file are subject to the Mozilla Public License Version 1.1
|
7
|
* See LICENSE.TXT at the top of this package for the full license terms.
|
8
|
*/
|
9
|
package eu.etaxonomy.cdm.io.edaphobase;
|
10
|
|
11
|
import java.sql.ResultSet;
|
12
|
import java.sql.SQLException;
|
13
|
import java.util.HashMap;
|
14
|
import java.util.HashSet;
|
15
|
import java.util.List;
|
16
|
import java.util.Map;
|
17
|
import java.util.Set;
|
18
|
import java.util.UUID;
|
19
|
|
20
|
import org.apache.log4j.Logger;
|
21
|
import org.codehaus.plexus.util.StringUtils;
|
22
|
import org.springframework.stereotype.Component;
|
23
|
|
24
|
import eu.etaxonomy.cdm.io.common.IPartitionedIO;
|
25
|
import eu.etaxonomy.cdm.io.common.ImportHelper;
|
26
|
import eu.etaxonomy.cdm.io.common.ResultSetPartitioner;
|
27
|
import eu.etaxonomy.cdm.io.common.mapping.UndefinedTransformerMethodException;
|
28
|
import eu.etaxonomy.cdm.model.agent.Person;
|
29
|
import eu.etaxonomy.cdm.model.agent.Team;
|
30
|
import eu.etaxonomy.cdm.model.agent.TeamOrPersonBase;
|
31
|
import eu.etaxonomy.cdm.model.common.AnnotatableEntity;
|
32
|
import eu.etaxonomy.cdm.model.common.Annotation;
|
33
|
import eu.etaxonomy.cdm.model.common.AnnotationType;
|
34
|
import eu.etaxonomy.cdm.model.common.CdmBase;
|
35
|
import eu.etaxonomy.cdm.model.common.Language;
|
36
|
import eu.etaxonomy.cdm.model.common.Marker;
|
37
|
import eu.etaxonomy.cdm.model.common.MarkerType;
|
38
|
import eu.etaxonomy.cdm.model.common.OrderedTermVocabulary;
|
39
|
import eu.etaxonomy.cdm.model.common.Representation;
|
40
|
import eu.etaxonomy.cdm.model.name.IZoologicalName;
|
41
|
import eu.etaxonomy.cdm.model.name.NomenclaturalStatusType;
|
42
|
import eu.etaxonomy.cdm.model.name.Rank;
|
43
|
import eu.etaxonomy.cdm.model.name.RankClass;
|
44
|
import eu.etaxonomy.cdm.model.name.TaxonNameFactory;
|
45
|
import eu.etaxonomy.cdm.model.reference.Reference;
|
46
|
import eu.etaxonomy.cdm.model.reference.ReferenceFactory;
|
47
|
import eu.etaxonomy.cdm.model.taxon.Synonym;
|
48
|
import eu.etaxonomy.cdm.model.taxon.Taxon;
|
49
|
import eu.etaxonomy.cdm.model.taxon.TaxonBase;
|
50
|
import eu.etaxonomy.cdm.strategy.parser.NonViralNameParserImplRegExBase;
|
51
|
|
52
|
/**
|
53
|
* @author a.mueller
|
54
|
* @since 18.12.2015
|
55
|
*
|
56
|
*/
|
57
|
@Component
|
58
|
public class EdaphobaseTaxonImport extends EdaphobaseImportBase {
|
59
|
private static final long serialVersionUID = -9138378836474086070L;
|
60
|
private static final Logger logger = Logger.getLogger(EdaphobaseTaxonImport.class);
|
61
|
|
62
|
private static final String tableName = "tax_taxon";
|
63
|
|
64
|
private static final String pluralString = "taxa";
|
65
|
|
66
|
private static final Object AUTHOR_NAMESPACE = "tax_author_name";
|
67
|
|
68
|
    /**
     * Creates the taxon import, configured for the {@code tax_taxon}
     * source table (see {@link #tableName} and {@link #pluralString}).
     */
    public EdaphobaseTaxonImport() {
        super(tableName, pluralString);
    }
|
75
|
|
76
|
@Override
|
77
|
protected String getIdQuery(EdaphobaseImportState state) {
|
78
|
return "SELECT DISTINCT taxon_id FROM tax_taxon t "
|
79
|
+ " ORDER BY taxon_id";
|
80
|
}
|
81
|
|
82
|
    /**
     * Returns the record query for one id partition. Besides the taxon record
     * itself it joins the parent, grandparent and grand-grandparent taxa and
     * their rank labels (from selective_list.element) so that higher name
     * parts can be filled in {@code makeSingleTaxon} without extra queries.
     */
    @Override
    protected String getRecordQuery(EdaphobaseImportConfigurator config) {
        String result = " SELECT DISTINCT t.*, r.value_summary as rankStr, pr.value_summary as parentRankStr, ppr.value_summary as grandParentRankStr, pppr.value_summary as grandGrandParentRankStr, "
                + " pt.name as parentName, ppt.name as grandParentName, pppt.name as grandGrandParentName "
                + " FROM tax_taxon t "
                + " LEFT JOIN tax_taxon pt ON t.parent_taxon_fk = pt.taxon_id "
                + " LEFT JOIN tax_taxon ppt ON pt.parent_taxon_fk = ppt.taxon_id "
                + " LEFT JOIN tax_taxon pppt ON ppt.parent_taxon_fk = pppt.taxon_id "
                + " LEFT OUTER JOIN selective_list.element r ON r.element_id = t.tax_rank_fk "
                + " LEFT OUTER JOIN selective_list.element pr ON pr.element_id = pt.tax_rank_fk "
                + " LEFT OUTER JOIN selective_list.element ppr ON ppr.element_id = ppt.tax_rank_fk "
                + " LEFT OUTER JOIN selective_list.element pppr ON pppr.element_id = pppt.tax_rank_fk "
                + " WHERE t.taxon_id IN (@IDSET)";
        //the partitioner replaces this token by the concrete id list
        result = result.replace("@IDSET", IPartitionedIO.ID_LIST_TOKEN);
        return result;
    }
|
98
|
|
99
|
    /**
     * Loads the set of taxa that are the accepted side of non-synonym
     * relations (needed by {@code checkValid}) before running the
     * partitioned import.
     */
    @Override
    protected void doInvoke(EdaphobaseImportState state) {
        makeIncludedInList(state);
        super.doInvoke(state);
    }
|
104
|
|
105
|
    //ids of taxa appearing as the "a"-side of a non-synonym relation (synonym_role <> 11614);
    //filled by makeIncludedInList and used by checkValid to treat them as valid
    private Set<Integer> includedInTaxa = new HashSet<>();
|
106
|
|
107
|
/**
|
108
|
* @param state
|
109
|
*/
|
110
|
private void makeIncludedInList(EdaphobaseImportState state) {
|
111
|
String sql = "SELECT sr.a_taxon_fk_taxon_id "
|
112
|
+ " FROM tax_synonym sr "
|
113
|
+ " WHERE sr.synonym_role <> 11614 ";
|
114
|
ResultSet rs = state.getConfig().getSource().getResultSet(sql);
|
115
|
try {
|
116
|
while (rs.next()){
|
117
|
Integer synId = rs.getInt("a_taxon_fk_taxon_id");
|
118
|
includedInTaxa.add(synId);
|
119
|
}
|
120
|
} catch (SQLException e) {
|
121
|
// TODO Auto-generated catch block
|
122
|
e.printStackTrace();
|
123
|
}
|
124
|
}
|
125
|
|
126
|
@Override
|
127
|
public boolean doPartition(@SuppressWarnings("rawtypes") ResultSetPartitioner partitioner, EdaphobaseImportState state) {
|
128
|
ResultSet rs = partitioner.getResultSet();
|
129
|
@SuppressWarnings("rawtypes")
|
130
|
Set<TaxonBase> taxaToSave = new HashSet<>();
|
131
|
try {
|
132
|
while (rs.next()){
|
133
|
makeSingleTaxon(state, rs, taxaToSave);
|
134
|
}
|
135
|
} catch (SQLException | UndefinedTransformerMethodException e) {
|
136
|
e.printStackTrace();
|
137
|
}
|
138
|
|
139
|
getTaxonService().saveOrUpdate(taxaToSave);
|
140
|
return true;
|
141
|
}
|
142
|
|
143
|
    /**
     * Creates one taxon (valid name) or synonym with its zoological name from
     * the current record of the result set and adds it to {@code taxaToSave}.
     * Handles rank resolution, name parts from parent/grandparent records,
     * authorship, "nomen ..." and author-style name additions, nomenclatural
     * reference, secundum reference, markers, remarks, original source and a
     * final title-cache consistency check.
     *
     * @param state the import state (related objects, transformer, config)
     * @param rs the record query result set, positioned on the record to import
     * @param taxaToSave collector for created taxa; saved by the caller
     * @throws SQLException on problems reading the result set
     * @throws UndefinedTransformerMethodException if a transformer method is missing
     */
    private void makeSingleTaxon(EdaphobaseImportState state, ResultSet rs, Set<TaxonBase> taxaToSave)
            throws SQLException, UndefinedTransformerMethodException {
        Integer id = nullSafeInt(rs, "taxon_id");
        Integer year = nullSafeInt(rs, "tax_year");
        boolean isBrackets = rs.getBoolean("tax_brackets");
        String remark = rs.getString("remark");
        String nameStr = rs.getString("name");
        String authorName = rs.getString("tax_author_name");
        //parentTaxonFk
        //rankFk
        Integer nomRefId = nullSafeInt(rs, "tax_document");
        boolean isValid = rs.getBoolean("valid");
        boolean isDeleted = rs.getBoolean("deleted");
        String displayString = rs.getString("display_string");
        //NOTE(review): 'version' and 'treeIndex' are read but never used below — confirm whether they are needed
        Integer version = nullSafeInt(rs, "versionfield");
        String pages = rs.getString("pages");
        String treeIndex = rs.getString("path_to_root");
//        Integer rankFk = nullSafeInt(rs, "tax_rank_fk");
        String nameAddition = rs.getString("name_addition");
        String officialRemark = rs.getString("official_remark");
        boolean isGroup = rs.getBoolean("taxonomic_group");
        String rankStr = rs.getString("rankStr");
        String parentRankStr = rs.getString("parentRankStr");
        String grandParentRankStr = rs.getString("grandParentRankStr");
        String grandGrandParentRankStr = rs.getString("grandGrandParentRankStr");
        String parentNameStr = rs.getString("parentName");
        String grandParentNameStr = rs.getString("grandParentName");
        String grandGrandParentNameStr = rs.getString("grandGrandParentName");
        String editUuid = rs.getString("edit_uuid");

        //deleted records are skipped entirely
        if (isDeleted){
            logger.warn("Deleted not handled according to mail Stephan 2018-03-07. ID: " + id );
            return;
        }
        //nameAdditionUsed tracks whether any of the handlers below consumed the name addition
        boolean nameAdditionUsed = isBlank(nameAddition);
        if (!nameAdditionUsed){
            nameAddition = nameAddition.trim();
        }

        //non-synonym-related taxa are forced valid, see checkValid
        isValid = checkValid(state, id, isValid);

        //for debug only
        if (id.equals(979370000) ){
            logger.debug("now");
        }

        TaxonBase<?> taxonBase;

        //rank labels come as "<de>, <en>, <latin>"; reduce to the English part
        rankStr= extractEnglish(rankStr);
        parentRankStr= extractEnglish(parentRankStr);
        grandParentRankStr= extractEnglish(grandParentRankStr);
        grandGrandParentRankStr= extractEnglish(grandGrandParentRankStr);

        //Name etc.
        Rank rank = makeRank(state, rankStr);
        checkRankMarker(state, rank);
        IZoologicalName name = TaxonNameFactory.NewZoologicalInstance(rank);
        if (rank == null){
            //without a rank the name cannot be parsed into parts; protect the cache
            name.setNameCache(nameStr, true);
        }else{
            //fill name parts from this taxon and up to three ancestor levels
            setNamePart(nameStr, rank, name);
            Rank parentRank = makeRank(state, parentRankStr);
            setNamePart(parentNameStr, parentRank, name);
            Rank parentParentRank = makeRank(state, grandParentRankStr);
            setNamePart(grandParentNameStr, parentParentRank, name);
            Rank grandParentParentRank = makeRank(state, grandGrandParentRankStr);
            setNamePart(grandGrandParentNameStr, grandParentParentRank, name);
            //NOTE(review): due to operator precedence this reads (A && B) || (C && D) — confirm that is intended
            if (grandParentParentRank != null && grandParentParentRank.isLower(Rank.GENUS()) || isBlank(name.getGenusOrUninomial()) && !name.isProtectedNameCache()){
                logger.warn("Grand-Grandparent rank is lower than genus for " +
                        name.getTitleCache() + " (edapho-id: " + id + "; cdm-id: " + name.getId() + ")");
            }
        }

        //Authors
        if (isNotBlank(authorName)){
            //normalize "et" to "&" to match the author keys collected in getRelatedObjectsForPartition
            authorName = authorName.replace(" et ", " & ");
            TeamOrPersonBase<?> author = state.getRelatedObject(AUTHOR_NAMESPACE, authorName, TeamOrPersonBase.class);
            if (author == null){
                logger.warn("Author not found in state: " + authorName);
            }else{
                //brackets indicate the author of the basionym (zoological convention)
                if (isBrackets){
                    name.setBasionymAuthorship(author);
                    name.setOriginalPublicationYear(year);
                }else{
                    name.setCombinationAuthorship(author);
                    name.setPublicationYear(year);
                }
            }
        }

        //regex building blocks for recognizing author names in name additions,
        //including known multi-word/irregular author names
        String capitalWord = NonViralNameParserImplRegExBase.capitalWord;
        String autNam = "(" + capitalWord + "( in "+capitalWord+")?|Schuurmans Stekhoven|Winiszewska-Ślipińska|Fürst von Lieven|de Coninck|de Man|de Ley|de Grisse|"
                + "van der Linde|Pschorn-Walcher|van der Berg|J. Goddey)";
        //case 1: "nomen ..." additions become nomenclatural status
        if (isNotBlank(nameAddition) && nameAddition.matches("(\\[|\\()?nomen.*")){
            if ("(nomen oblitum)".equals(nameAddition) ){
                name.addStatus(NomenclaturalStatusType.ZOO_OBLITUM(), null, null);
            }else if ("nomen dubium".equals(nameAddition) || "[nomen dubium]".equals(nameAddition)){
                name.addStatus(NomenclaturalStatusType.DOUBTFUL(), null, null);
            }else if ("nomen nudum".equals(nameAddition)){
                name.addStatus(NomenclaturalStatusType.NUDUM(), null, null);
            }else if (nameAddition.matches("nomen nudum \\["+autNam+"\\, 19\\d{2}]")){
                name.addStatus(NomenclaturalStatusType.NUDUM(), null, null);
                Person nomNudAuthor = parseNomenNudumAuthor(state, name, nameAddition);
                if (name.getCombinationAuthorship()!= null || name.getBasionymAuthorship() != null){
                    logger.warn("Author already exists for nomen nudum name with author. ID: " + id);
                }
                name.setCombinationAuthorship(nomNudAuthor);
            }else{
                logger.warn("'nomen xxx' name addition not recognized: " + nameAddition + ". ID: " + id);
            }
            nameAdditionUsed = true;
        }
        //case 2: the addition is an author team plus year, e.g. "Miller & Smith, 1988"
        if (isNotBlank(nameAddition) && nameAddition.matches(autNam + "((, "+autNam+")? & " + autNam + ")?" + ", \\d{4}")){
            nameAddition = nameAddition.replace(" et ", " & ");
            //split off the trailing ", yyyy" (6 characters)
            int pos = nameAddition.length()-6;
            String authorStr = nameAddition.substring(0, pos);
            Integer naYear = Integer.valueOf(nameAddition.substring(pos + 2));
            if (name.getPublicationYear() != null){
                logger.warn("Publication year already exists. ID=" + id);
            }
            name.setPublicationYear(naYear);
            TeamOrPersonBase<?> author = getNameAdditionAuthor(authorStr);
            if (name.getCombinationAuthorship() != null){
                logger.warn("Combination author already exists. ID=" + id);
            }
            name.setCombinationAuthorship(author);
            nameAdditionUsed = true;
        }
        //case 3: "nec/non Author, yyyy" — a later homonym is constructed
        if (isNotBlank(nameAddition) && nameAddition.matches("(nec|non) " + capitalWord + ", \\d{4}")){
            String str = nameAddition.substring(4);
            String[] split = str.split(",");
            //NOTE(review): the homonym clone is built but never attached to a
            //relationship or persisted below — confirm whether this is incomplete
            IZoologicalName homonym = (IZoologicalName)name.clone();
            homonym.setCombinationAuthorship(null);
            homonym.setBasionymAuthorship(null);
            homonym.setPublicationYear(null);
            homonym.setOriginalPublicationYear(null);
            TeamOrPersonBase<?> author = getNameAdditionAuthor(split[0]);
            homonym.setCombinationAuthorship(author);
            homonym.setPublicationYear(Integer.valueOf(split[1].trim()));
            nameAdditionUsed = true;
        }

        //nomRef
        if (nomRefId != null){
            Reference nomRef = state.getRelatedObject(REFERENCE_NAMESPACE, String.valueOf(nomRefId), Reference.class);
            if (nomRef == null){
                logger.warn("Reference " + nomRefId + " could not be found");
            }
            name.setNomenclaturalReference(nomRef);
        }
        name.setNomenclaturalMicroReference(isBlank(pages)? null : pages);

        //taxon
        Reference secRef = state.getRelatedObject(REFERENCE_NAMESPACE, state.getConfig().getSecUuid().toString(), Reference.class);
        if (secRef == null){
            secRef = makeSecRef(state);
        }
        //valid names become accepted taxa, everything else a synonym
        if (isValid){
            taxonBase = Taxon.NewInstance(name, secRef);
        }else{
            taxonBase = Synonym.NewInstance(name, secRef);
        }
        handleTaxonomicGroupMarker(state, taxonBase, isGroup);
        taxaToSave.add(taxonBase);

        //sensu, auct.
        if (isNotBlank(nameAddition) && (nameAddition.startsWith("sensu ") || "auct.".equals(nameAddition))){
            nameAddition = nameAddition.replace(" et ", " & ");
            taxonBase.setSec(null);
            taxonBase.setAppendedPhrase(nameAddition);
            //TODO
            nameAdditionUsed = true;
        }

        //remarks
        doNotes(taxonBase, remark, AnnotationType.TECHNICAL());
        doNotes(taxonBase, officialRemark, AnnotationType.EDITORIAL());

        //id
        ImportHelper.setOriginalSource(taxonBase, state.getTransactionalSourceReference(), id, TAXON_NAMESPACE);
        ImportHelper.setOriginalSource(name, state.getTransactionalSourceReference(), id, TAXON_NAMESPACE);
        //keep the UUID stable across imports by taking it from the source record
        taxonBase.setUuid(UUID.fromString(editUuid));
        handleExampleIdentifiers(taxonBase, id);

        //fallback: an unrecognized name addition becomes an appended phrase
        if (!nameAdditionUsed){
            logger.warn("name_addition not recognized: " + nameAddition + ". ID="+id);
            name.setAppendedPhrase(nameAddition);
        }

        //consistency check against the precomputed display string of the source record
        if (titleCacheDiffers(state, displayString, name, taxonBase)){
            String titleCache = taxonBase.getAppendedPhrase() != null ? taxonBase.getTitleCache() : name.getTitleCache();
            logger.warn("Displaystring differs from titleCache. ID=" + id + ".\n " + displayString + "\n " + titleCache);
        }
    }
|
345
|
|
346
|
|
347
|
    /**
     * Compares the Edaphobase display string with the CDM title caches
     * (name title, full name title, taxon title) after normalizing known
     * systematic differences.
     *
     * @param state the import state (currently unused here)
     * @param displayString the precomputed display string from the source record
     * @param name the created name
     * @param taxonBase the created taxon or synonym
     * @return true if the display string matches none of the title caches
     */
    private boolean titleCacheDiffers(EdaphobaseImportState state, String displayString, IZoologicalName name, TaxonBase<?> taxonBase) {
        //NOTE(review): the second replace renders as space-for-space here — the original
        //presumably replaced a non-breaking or double space; verify against version control
        String orig = displayString.replace("nomen nudum [Hirschmann, 1951]", "Hirschmann, 1951")
                .replace(" ", " ");
        //Edaphobase uses "group" where CDM writes "species group"
        String nameTitleCache = name.getTitleCache().replace("species group", "group");
        String taxonTitleCache = taxonBase.getTitleCache().replace("species group", "group");

//        if (state.getConfig().isIgnore4nomial() && orig.matches(".* subsp"))
        boolean result =
                !orig.equals(nameTitleCache)
                && !orig.equals(name.getFullTitleCache())
                && !orig.equals(taxonTitleCache);
        return result;
    }
|
367
|
|
368
|
/**
|
369
|
* @param authorStr
|
370
|
* @return
|
371
|
*/
|
372
|
private TeamOrPersonBase<?> getNameAdditionAuthor(String authorStr) {
|
373
|
TeamOrPersonBase<?> result;
|
374
|
String[] splits = authorStr.split("(, | & )");
|
375
|
if (splits.length == 1){
|
376
|
Person person = Person.NewInstance();
|
377
|
person.setNomenclaturalTitle(splits[0]);
|
378
|
result = person;
|
379
|
}else{
|
380
|
Team team = Team.NewInstance();
|
381
|
for (String split: splits){
|
382
|
Person person = Person.NewInstance();
|
383
|
person.setNomenclaturalTitle(split);
|
384
|
team.addTeamMember(person);
|
385
|
}
|
386
|
result = team;
|
387
|
}
|
388
|
//TODO deduplicate
|
389
|
return result;
|
390
|
}
|
391
|
|
392
|
/**
|
393
|
* @param state
|
394
|
* @param nameAddition
|
395
|
* @return
|
396
|
*/
|
397
|
private Person parseNomenNudumAuthor(EdaphobaseImportState state, IZoologicalName name, String nameAddition) {
|
398
|
nameAddition = nameAddition.replace("nomen nudum [", "").replace("tz, 195]", "tz, 1952]")
|
399
|
.replace("]", "");
|
400
|
String[] split = nameAddition.split(", ");
|
401
|
Integer year = Integer.valueOf(split[1]);
|
402
|
name.setPublicationYear(year);
|
403
|
//TODO deduplicate
|
404
|
Person author = Person.NewInstance();
|
405
|
author.setNomenclaturalTitle(split[0].trim());
|
406
|
return author;
|
407
|
}
|
408
|
|
409
|
/**
|
410
|
* @param state
|
411
|
* @param id
|
412
|
* @param isValid
|
413
|
* @return
|
414
|
*/
|
415
|
private boolean checkValid(EdaphobaseImportState state, Integer id, boolean isValid) {
|
416
|
if (isValid){
|
417
|
return isValid;
|
418
|
}else if (includedInTaxa.contains(id)){
|
419
|
return true;
|
420
|
}else{
|
421
|
return isValid;
|
422
|
}
|
423
|
}
|
424
|
|
425
|
/**
|
426
|
* @param rankStr
|
427
|
* @return
|
428
|
*/
|
429
|
private String extractEnglish(String rankStr) {
|
430
|
if (rankStr == null){
|
431
|
return null;
|
432
|
}
|
433
|
String[] splits = rankStr.split(", ");
|
434
|
if (splits.length != 3){
|
435
|
String message = "Wrong rank format: "+ rankStr;
|
436
|
logger.error(message);
|
437
|
return null;
|
438
|
}
|
439
|
return splits[1].trim();
|
440
|
}
|
441
|
|
442
|
|
443
|
    //hard-coded UUIDs for a handful of example taxa that require stable,
    //externally known identifiers; applied in handleExampleIdentifiers
    static Map<Integer,UUID> idMap = new HashMap<>();
    static{
        idMap.put(86594, UUID.fromString("715c2370-45a4-450c-99f7-e196758979ca")); //Aporrectodea caliginosa
        idMap.put(86593, UUID.fromString("230f1a69-5dcd-4829-a01c-17490a2fdf34")); //Aporrectodea
        idMap.put(86684, UUID.fromString("0982dc0e-1a79-45a0-8abc-8166625b94b8")); //Achaeta
        idMap.put(104328, UUID.fromString("15f0b5f8-44e4-4ae1-8b40-f36f0a049b27")); //Chamaedrilus
        idMap.put(97537, UUID.fromString("899c62e3-a116-4c5b-b22a-c76e761cc32e")); //Araeolaimoides caecus
    }
|
451
|
|
452
|
/**
|
453
|
* @param taxonBase
|
454
|
* @param id
|
455
|
*/
|
456
|
private void handleExampleIdentifiers(TaxonBase<?> taxonBase, Integer id) {
|
457
|
if (idMap.get(id) != null){
|
458
|
taxonBase.setUuid(idMap.get(id));
|
459
|
logger.warn("Override UUID for specific taxa. ID="+ id + "; uuid="+idMap.get(id) + "; name="+ taxonBase.getName().getTitleCache());
|
460
|
}
|
461
|
}
|
462
|
|
463
|
    /**
     * Marks the given rank as being used by Edaphobase. If the rank has no
     * markers yet, the "Edaphobase rank" marker type is created/fetched, a
     * German representation is added and the marked rank is persisted.
     *
     * @param state the import state giving access to the transformer
     * @param rank the rank to mark; a null rank is only logged
     * @throws UndefinedTransformerMethodException if the transformer does not support marker type uuids
     */
    private void checkRankMarker(EdaphobaseImportState state, Rank rank) throws UndefinedTransformerMethodException {

        if (rank != null){
            Set<Marker> markers = rank.getMarkers();
            if ( markers.size() == 0){ //we assume that no markers exist, at least not for markers of unused ranks
                UUID edaphoRankMarkerTypeUuid = state.getTransformer().getMarkerTypeUuid("EdaphoRankMarker");
                MarkerType marker = getMarkerType(state, edaphoRankMarkerTypeUuid, "Edaphobase rank", "Rank used in Edaphobase", "EdaRk" );
                Representation rep = Representation.NewInstance("Rang, verwendet in Edaphobase", "Edaphobase Rang", "EdaRg", Language.GERMAN());
                marker.addRepresentation(rep);
                rank.addMarker(Marker.NewInstance(marker, true));
                //persist the marker on the (shared) rank term
                getTermService().saveOrUpdate(rank);
            }
        }else{
            logger.info("Rank is null and marker can not be set");
        }
    }
|
484
|
|
485
|
/**
|
486
|
* @param state
|
487
|
* @param isGroup
|
488
|
* @param taxonBase
|
489
|
*/
|
490
|
private void handleTaxonomicGroupMarker(EdaphobaseImportState state, TaxonBase<?> taxonBase, boolean isGroup) {
|
491
|
if (! isGroup){
|
492
|
return;
|
493
|
}else{
|
494
|
try {
|
495
|
MarkerType markerType = getMarkerType(state, state.getTransformer().getMarkerTypeUuid("TaxGrossgruppe"), "Tax. Gruppe", "Taxonomische Grossgruppe", "TGG", null, Language.GERMAN());
|
496
|
if (taxonBase.isInstanceOf(Synonym.class)){
|
497
|
logger.warn("Syonym is marked as 'taxonomische Grossgruppe'");
|
498
|
}
|
499
|
taxonBase.addMarker(Marker.NewInstance(markerType, true));
|
500
|
} catch (UndefinedTransformerMethodException e) {
|
501
|
}
|
502
|
}
|
503
|
}
|
504
|
|
505
|
/**
|
506
|
* @param state
|
507
|
* @return
|
508
|
*/
|
509
|
private Reference makeSecRef(EdaphobaseImportState state) {
|
510
|
Reference ref = ReferenceFactory.newDatabase();
|
511
|
ref.setTitle(state.getConfig().getEdaphobaseSecundumTitle());
|
512
|
ref.setUuid(state.getConfig().getSecUuid());
|
513
|
state.addRelatedObject(REFERENCE_NAMESPACE, ref.getUuid().toString(), ref);
|
514
|
getReferenceService().save(ref);
|
515
|
return ref;
|
516
|
}
|
517
|
|
518
|
@Override
|
519
|
public Map<Object, Map<String, ? extends CdmBase>> getRelatedObjectsForPartition(ResultSet rs,
|
520
|
EdaphobaseImportState state) {
|
521
|
Map<Object, Map<String, ? extends CdmBase>> result = new HashMap<>();
|
522
|
Map<String, TeamOrPersonBase<?>> authorMap = new HashMap<>();
|
523
|
Set<String> authorSet = new HashSet<>();
|
524
|
Set<String> referenceIdSet = new HashSet<>();
|
525
|
|
526
|
try {
|
527
|
while (rs.next()){
|
528
|
String authorStr = rs.getString("tax_author_name");
|
529
|
if (authorStr != null){
|
530
|
authorStr = authorStr.replace(" et ", " & ");
|
531
|
authorSet.add(authorStr);
|
532
|
}
|
533
|
handleForeignKey(rs, referenceIdSet, "tax_document");
|
534
|
}
|
535
|
} catch (SQLException e) {
|
536
|
e.printStackTrace();
|
537
|
}
|
538
|
|
539
|
//Authors
|
540
|
Set<UUID> uuidSet = new HashSet<>();
|
541
|
for (String authorStr : authorSet){
|
542
|
UUID uuid = state.getAuthorUuid(authorStr);
|
543
|
uuidSet.add(uuid);
|
544
|
}
|
545
|
List<TeamOrPersonBase<?>> authors = (List)getAgentService().find(uuidSet);
|
546
|
Map<UUID, TeamOrPersonBase<?>> authorUuidMap = new HashMap<>();
|
547
|
for (TeamOrPersonBase<?> author : authors){
|
548
|
authorUuidMap.put(author.getUuid(), author);
|
549
|
}
|
550
|
|
551
|
for (String authorStr : authorSet){
|
552
|
UUID uuid = state.getAuthorUuid(authorStr);
|
553
|
TeamOrPersonBase<?> author = authorUuidMap.get(uuid);
|
554
|
authorMap.put(authorStr, author);
|
555
|
}
|
556
|
result.put(AUTHOR_NAMESPACE, authorMap);
|
557
|
|
558
|
//reference map
|
559
|
String nameSpace = REFERENCE_NAMESPACE;
|
560
|
Class<?> cdmClass = Reference.class;
|
561
|
Set<String> idSet = referenceIdSet;
|
562
|
Map<String, Reference> referenceMap = (Map<String, Reference>)getCommonService().getSourcedObjectsByIdInSource(cdmClass, idSet, nameSpace);
|
563
|
result.put(nameSpace, referenceMap);
|
564
|
|
565
|
//secundum
|
566
|
UUID secUuid = state.getConfig().getSecUuid();
|
567
|
Reference secRef = getReferenceService().find(secUuid);
|
568
|
referenceMap.put(secUuid.toString(), secRef);
|
569
|
|
570
|
return result;
|
571
|
}
|
572
|
|
573
|
    /**
     * Writes {@code nameStr} into the name part that corresponds to the given
     * rank (uninomial, infrageneric epithet, specific epithet or
     * infraspecific epithet). An already filled part is never overwritten, so
     * calling this repeatedly from taxon up to grand-grandparent fills each
     * part only once, with the lowest-rank value winning.
     *
     * @param nameStr the name string of the (ancestor) taxon
     * @param rank the rank deciding which part to fill; null is a no-op
     * @param name the name to complete
     */
    private void setNamePart(String nameStr, Rank rank, IZoologicalName name) {
        if (rank != null){
            if (rank.isSupraGeneric() || rank.isGenus()){
                if (isBlank(name.getGenusOrUninomial())){
                    name.setGenusOrUninomial(nameStr);
                }
            }else if (rank.isInfraGenericButNotSpeciesGroup()){
                if (isBlank(name.getInfraGenericEpithet())){
                    name.setInfraGenericEpithet(nameStr);
                }
            }else if (rank.isSpeciesAggregate() || rank.isSpecies()){
                if (isBlank(name.getSpecificEpithet())){
                    name.setSpecificEpithet(nameStr);
                }
            }else if (rank.isInfraSpecific()){
                if (isBlank(name.getInfraSpecificEpithet())){
                    name.setInfraSpecificEpithet(nameStr);
                }
            }
        }
    }
|
594
|
|
595
|
private Rank makeRank(EdaphobaseImportState state, String rankStr) {
|
596
|
Rank rank = null;
|
597
|
try {
|
598
|
rank = state.getTransformer().getRankByKey(rankStr);
|
599
|
if (rank == null && rankStr != null){
|
600
|
if (rankStr.equals("Cohort")){
|
601
|
//position not really clear #7285
|
602
|
Rank lowerRank = Rank.SUPERORDER();
|
603
|
rank = this.getRank(state, Rank.uuidCohort, "Cohort", "Cohort", null,
|
604
|
(OrderedTermVocabulary<Rank>)Rank.GENUS().getVocabulary(),
|
605
|
lowerRank, RankClass.Suprageneric);
|
606
|
}else if (rankStr.equals("Hyporder")){
|
607
|
rank = this.getRank(state, Rank.uuidHyporder, "Hyporder", "Hyporder", null,
|
608
|
(OrderedTermVocabulary<Rank>)Rank.GENUS().getVocabulary(),
|
609
|
Rank.SUBORDER(), RankClass.Suprageneric);
|
610
|
}
|
611
|
}
|
612
|
} catch (UndefinedTransformerMethodException e) {
|
613
|
e.printStackTrace();
|
614
|
}
|
615
|
return rank;
|
616
|
}
|
617
|
|
618
|
protected void doNotes(AnnotatableEntity annotatableEntity, String notes, AnnotationType type) {
|
619
|
if (StringUtils.isNotBlank(notes) && annotatableEntity != null ){
|
620
|
String notesString = String.valueOf(notes);
|
621
|
if (notesString.length() > 65530 ){
|
622
|
notesString = notesString.substring(0, 65530) + "...";
|
623
|
logger.warn("Notes string is longer than 65530 and was truncated: " + annotatableEntity);
|
624
|
}
|
625
|
Annotation notesAnnotation = Annotation.NewInstance(notesString, Language.UNDETERMINED());
|
626
|
//notesAnnotation.setAnnotationType(AnnotationType.EDITORIAL());
|
627
|
//notes.setCommentator(bmiConfig.getCommentator());
|
628
|
annotatableEntity.addAnnotation(notesAnnotation);
|
629
|
}
|
630
|
}
|
631
|
|
632
|
    /**
     * No pre-import checks are implemented for this import; always passes.
     */
    @Override
    protected boolean doCheck(EdaphobaseImportState state) {
        return true;
    }
|
636
|
|
637
|
    /**
     * Skips this import when the configuration disables the taxon import.
     */
    @Override
    protected boolean isIgnore(EdaphobaseImportState state) {
        return ! state.getConfig().isDoTaxa();
    }
|
641
|
|
642
|
}
|