1
|
/**
|
2
|
* Copyright (C) 2007 EDIT
|
3
|
* European Distributed Institute of Taxonomy
|
4
|
* http://www.e-taxonomy.eu
|
5
|
*
|
6
|
* The contents of this file are subject to the Mozilla Public License Version 1.1
|
7
|
* See LICENSE.TXT at the top of this package for the full license terms.
|
8
|
*/
|
9
|
|
10
|
package eu.etaxonomy.cdm.io.iapt;
|
11
|
|
12
|
import java.util.ArrayList;
|
13
|
import java.util.Arrays;
|
14
|
import java.util.HashMap;
|
15
|
import java.util.HashSet;
|
16
|
import java.util.List;
|
17
|
import java.util.Map;
|
18
|
import java.util.Set;
|
19
|
import java.util.UUID;
|
20
|
import java.util.regex.Matcher;
|
21
|
import java.util.regex.Pattern;
|
22
|
|
23
|
import org.apache.commons.lang.ArrayUtils;
|
24
|
import org.apache.commons.lang.StringEscapeUtils;
|
25
|
import org.apache.commons.lang.StringUtils;
|
26
|
import org.apache.log4j.Level;
|
27
|
import org.apache.log4j.Logger;
|
28
|
import org.joda.time.DateTimeFieldType;
|
29
|
import org.joda.time.Partial;
|
30
|
import org.joda.time.format.DateTimeFormat;
|
31
|
import org.joda.time.format.DateTimeFormatter;
|
32
|
import org.springframework.stereotype.Component;
|
33
|
|
34
|
import com.fasterxml.jackson.core.JsonProcessingException;
|
35
|
import com.fasterxml.jackson.databind.ObjectMapper;
|
36
|
|
37
|
import eu.etaxonomy.cdm.api.facade.DerivedUnitFacade;
|
38
|
import eu.etaxonomy.cdm.common.CdmUtils;
|
39
|
import eu.etaxonomy.cdm.io.mexico.SimpleExcelTaxonImport;
|
40
|
import eu.etaxonomy.cdm.io.mexico.SimpleExcelTaxonImportState;
|
41
|
import eu.etaxonomy.cdm.model.agent.Institution;
|
42
|
import eu.etaxonomy.cdm.model.agent.Person;
|
43
|
import eu.etaxonomy.cdm.model.agent.TeamOrPersonBase;
|
44
|
import eu.etaxonomy.cdm.model.common.Annotation;
|
45
|
import eu.etaxonomy.cdm.model.common.AnnotationType;
|
46
|
import eu.etaxonomy.cdm.model.common.DefinedTermBase;
|
47
|
import eu.etaxonomy.cdm.model.common.Extension;
|
48
|
import eu.etaxonomy.cdm.model.common.ExtensionType;
|
49
|
import eu.etaxonomy.cdm.model.common.IdentifiableSource;
|
50
|
import eu.etaxonomy.cdm.model.common.Language;
|
51
|
import eu.etaxonomy.cdm.model.common.LanguageString;
|
52
|
import eu.etaxonomy.cdm.model.common.Marker;
|
53
|
import eu.etaxonomy.cdm.model.common.MarkerType;
|
54
|
import eu.etaxonomy.cdm.model.common.OriginalSourceType;
|
55
|
import eu.etaxonomy.cdm.model.common.TimePeriod;
|
56
|
import eu.etaxonomy.cdm.model.name.BotanicalName;
|
57
|
import eu.etaxonomy.cdm.model.name.NameRelationshipType;
|
58
|
import eu.etaxonomy.cdm.model.name.NameTypeDesignation;
|
59
|
import eu.etaxonomy.cdm.model.name.NomenclaturalCode;
|
60
|
import eu.etaxonomy.cdm.model.name.NomenclaturalStatus;
|
61
|
import eu.etaxonomy.cdm.model.name.NomenclaturalStatusType;
|
62
|
import eu.etaxonomy.cdm.model.name.Rank;
|
63
|
import eu.etaxonomy.cdm.model.name.RankClass;
|
64
|
import eu.etaxonomy.cdm.model.name.SpecimenTypeDesignationStatus;
|
65
|
import eu.etaxonomy.cdm.model.name.TaxonNameBase;
|
66
|
import eu.etaxonomy.cdm.model.occurrence.Collection;
|
67
|
import eu.etaxonomy.cdm.model.occurrence.DerivedUnit;
|
68
|
import eu.etaxonomy.cdm.model.occurrence.FieldUnit;
|
69
|
import eu.etaxonomy.cdm.model.occurrence.GatheringEvent;
|
70
|
import eu.etaxonomy.cdm.model.occurrence.SpecimenOrObservationType;
|
71
|
import eu.etaxonomy.cdm.model.reference.Reference;
|
72
|
import eu.etaxonomy.cdm.model.reference.ReferenceFactory;
|
73
|
import eu.etaxonomy.cdm.model.taxon.Classification;
|
74
|
import eu.etaxonomy.cdm.model.taxon.ITaxonTreeNode;
|
75
|
import eu.etaxonomy.cdm.model.taxon.Synonym;
|
76
|
import eu.etaxonomy.cdm.model.taxon.SynonymType;
|
77
|
import eu.etaxonomy.cdm.model.taxon.Taxon;
|
78
|
import eu.etaxonomy.cdm.model.taxon.TaxonNode;
|
79
|
import eu.etaxonomy.cdm.strategy.parser.NonViralNameParserImpl;
|
80
|
|
81
|
/**
|
82
|
* @author a.mueller
|
83
|
* @created 05.01.2016
|
84
|
*/
|
85
|
|
86
|
@Component("iAPTExcelImport")
|
87
|
public class IAPTExcelImport<CONFIG extends IAPTImportConfigurator> extends SimpleExcelTaxonImport<CONFIG> {
|
88
|
private static final long serialVersionUID = -747486709409732371L;
private static final Logger logger = Logger.getLogger(IAPTExcelImport.class);

// Marker appended in annotation text to flag entries needing editorial attention.
public static final String ANNOTATION_MARKER_STRING = "[*]";

// Fixed UUID of the classification root node used by this import.
private static UUID ROOT_UUID = UUID.fromString("4137fd2a-20f6-4e70-80b9-f296daf51d82");

// Shared parser for botanical (non-viral) scientific names.
private static NonViralNameParserImpl nameParser = NonViralNameParserImpl.NewInstance();

// Column headers of the IAPT source file; one constant per expected column.
private final static String REGISTRATIONNO_PK= "RegistrationNo_Pk";
private final static String HIGHERTAXON= "HigherTaxon";
private final static String FULLNAME= "FullName";
private final static String AUTHORSSPELLING= "AuthorsSpelling";
private final static String LITSTRING= "LitString";
private final static String REGISTRATION= "Registration";
private final static String TYPE= "Type";
private final static String CAVEATS= "Caveats";
private final static String FULLBASIONYM= "FullBasionym";
private final static String FULLSYNSUBST= "FullSynSubst";
private final static String NOTESTXT= "NotesTxt";
private final static String REGDATE= "RegDate";
private final static String NAMESTRING= "NameString";
private final static String BASIONYMSTRING= "BasionymString";
private final static String SYNSUBSTSTR= "SynSubstStr";
private final static String AUTHORSTRING= "AuthorString";

// The complete set of columns expected in each input record.
private static List<String> expectedKeys= Arrays.asList(new String[]{
        REGISTRATIONNO_PK, HIGHERTAXON, FULLNAME, AUTHORSSPELLING, LITSTRING, REGISTRATION, TYPE, CAVEATS, FULLBASIONYM, FULLSYNSUBST, NOTESTXT, REGDATE, NAMESTRING, BASIONYMSTRING, SYNSUBSTSTR, AUTHORSTRING});
|
116
|
|
117
|
// Splits a nomenclatural reference string into title, detail (page etc.),
// publication date and optional issue part, e.g. "Title: 12. 1997 (2).".
private static final Pattern nomRefTokenizeP = Pattern.compile("^(?<title>.*):\\s(?<detail>[^\\.:]+)\\.(?<date>.*?)(?:\\s\\((?<issue>[^\\)]*)\\)\\s*)?\\.?$");
private static final Pattern[] datePatterns = new Pattern[]{
    // NOTE:
    // The order of the patterns is extremely important!!!
    //
    // all patterns cover the years 1700 - 1999
    // NOTE(review): "[7,8,9]" is a character class that also matches a literal
    // comma; presumably "[789]" was intended — harmless for valid input, verify.
    Pattern.compile("^(?<year>1[7,8,9][0-9]{2})$"), // only year, like '1969'
    Pattern.compile("^(?<monthName>\\p{L}+\\.?)\\s(?<day>[0-9]{1,2})(?:st|rd|th)?\\.?,?\\s(?<year>(?:1[7,8,9])?[0-9]{2})$"), // full date like April 12, 1969 or april 12th 1999
    Pattern.compile("^(?<monthName>\\p{L}+\\.?),?\\s?(?<year>(?:1[7,8,9])?[0-9]{2})$"), // April 99 or April, 1999 or Apr. 12
    Pattern.compile("^(?<day>[0-9]{1,2})([\\.\\-/])(\\s?)(?<month>[0-1]?[0-9])\\2\\3(?<year>(?:1[7,8,9])?[0-9]{2})$"), // full date like 12.04.1969 or 12. 04. 1969 or 12/04/1969 or 12-04-1969
    Pattern.compile("^(?<day>[0-9]{1,2})([\\.\\-/])(?<monthName>[IVX]{1,2})\\2(?<year>(?:1[7,8,9])?[0-9]{2})$"), // full date like 12-VI-1969
    Pattern.compile("^(?:(?<day>[0-9]{1,2})(?:\\sde)?\\s)?(?<monthName>\\p{L}+)(?:\\sde)?\\s(?<year>(?:1[7,8,9])?[0-9]{2})$"), // full and partial date like 12 de Enero de 1999 or Enero de 1999
    Pattern.compile("^(?<month>[0-1]?[0-9])([\\.\\-/])(?<year>(?:1[7,8,9])?[0-9]{2})$"), // partial date like 04.1969 or 04/1969 or 04-1969
    Pattern.compile("^(?<year>(?:1[7,8,9])?[0-9]{2})([\\.\\-/])(?<month>[0-1]?[0-9])$"),// partial date like 1999-04
    Pattern.compile("^(?<monthName>[IVX]{1,2})([\\.\\-/])(?<year>(?:1[7,8,9])?[0-9]{2})$"), // partial date like VI-1969
    Pattern.compile("^(?<day>[0-9]{1,2})(?:[\\./]|th|rd|st)?\\s(?<monthName>\\p{L}+\\.?),?\\s?(?<year>(?:1[7,8,9])?[0-9]{2})$"), // full date like 12. April 1969 or april 1999 or 22 Dec.1999
};
// Splits a specimen type string of the form
// "Type: <fieldUnit> Holotype: <holotype>. Isotype(s): <isotype>." into its
// three named parts; holotype and isotype parts are optional.
protected static final Pattern typeSpecimenSplitPattern = Pattern.compile("^(?:\"*[Tt]ype: (?<fieldUnit>.*?))(?:[Hh]olotype:(?<holotype>.*?)\\.?)?(?:[Ii]sotype[^:]*:(?<isotype>.*)\\.?)?\\.?$");
|
135
|
|
136
|
private static final Pattern typeNameBasionymPattern = Pattern.compile("\\([Bb]asionym\\s?\\:\\s?(?<basionymName>[^\\)]*).*$");
|
137
|
private static final Pattern typeNameNotePattern = Pattern.compile("\\[([^\\[]*)"); // matches the inner of '[...]'
|
138
|
private static final Pattern typeNameSpecialSplitPattern = Pattern.compile("(?<note>.*\\;.*?)\\:(?<agent>)\\;(<name>.*)");
|
139
|
|
140
|
protected static final Pattern collectorPattern = Pattern.compile(".*?(?<fullStr1>\\([Ll]eg\\.\\s+(?<data1>[^\\)]*)\\)).*$|.*?(?<fullStr2>\\s[Ll]eg\\.\\:?\\s+(?<data2>.*?)\\.?)$|^(?<fullStr3>[Ll]eg\\.\\:?\\s+(?<data3>.*?)\\.?)");
|
141
|
private static final Pattern collectionDataPattern = Pattern.compile("^(?<collector>[^,]*),\\s?(?<detail>.*?)\\.?$");
|
142
|
private static final Pattern collectorsNumber = Pattern.compile("^([nN]o\\.\\s.*)$");
|
143
|
|
144
|
// AccessionNumbers: , #.*, n°:?, 96/3293, No..*, -?\w{1,3}-[0-9\-/]*
|
145
|
private static final Pattern accessionNumberOnlyPattern = Pattern.compile("^(?<accNumber>(?:n°\\:?\\s?|#|No\\.?\\s?)?[\\d\\w\\-/]*)$");
|
146
|
|
147
|
private static final Pattern[] specimenTypePatterns = new Pattern[]{
|
148
|
Pattern.compile("^(?<colCode>[A-Z]+|CPC Micropaleontology Lab\\.?)\\s+(?:\\((?<institute>.*[^\\)])\\))(?<accNumber>.*)?$"), // like: GAUF (Gansu Agricultural University) No. 1207-1222
|
149
|
Pattern.compile("^(?<colCode>[A-Z]+|CPC Micropaleontology Lab\\.?)\\s+(?:Coll\\.\\s(?<subCollection>[^\\.,;]*)(.))(?<accNumber>.*)?$"), // like KASSEL Coll. Krasske, Praep. DII 78
|
150
|
Pattern.compile("^(?:in\\s)?(?<institute>[Cc]oll\\.\\s.*?)(?:\\s+(?<accNumber>(Praep\\.|slide|No\\.|Inv\\. Nr\\.|Nr\\.).*))?$"), // like Coll. Lange-Bertalot, Bot. Inst., Univ. Frankfurt/Main, Germany Praep. Neukaledonien OTL 62
|
151
|
Pattern.compile("^(?<institute>Inst\\.\\s.*?)\\s+(?<accNumber>N\\s.*)?$"), // like Inst. Geological Sciences, Acad. Sci. Belarus, Minsk N 212 A
|
152
|
Pattern.compile("^(?<colCode>[A-Z]+)(?:\\s+(?<accNumber>.*))?$"), // identifies the Collection code and takes the rest as accessionNumber if any
|
153
|
};
|
154
|
|
155
|
|
156
|
private static final Pattern registrationPattern = Pattern.compile("^Registration date\\:\\s(?<regdate>\\d\\d\\.\\d\\d\\.\\d\\d); no\\.\\:\\s(?<regid>\\d+);\\soffice\\:\\s(?<office>.*?)\\.(?:\\s\\[Form no\\.\\:\\s(?<formNo>d+)\\])?$"); // Registration date: 29.06.98; no.: 2922; office: Berlin.
|
157
|
|
158
|
// Lower-cased month name (several languages, abbreviations and roman
// numerals) -> month-of-year (1-12). Populated once below; read by
// monthFromName(). Names shared between languages (e.g. "mai") simply
// overwrite each other with the same value.
private static Map<String, Integer> monthFromNameMap = new HashMap<>();

static {
    // FIX: the Czech July was "červenec " with a trailing space, so the key
    // stored in the map could never equal a parsed month name (the date
    // patterns never capture trailing whitespace) and Czech July dates
    // failed to resolve.
    String[] ck = new String[]{"leden", "únor", "březen", "duben", "květen", "červen", "červenec", "srpen", "září", "říjen", "listopad", "prosinec"};
    String[] fr = new String[]{"janvier", "février", "mars", "avril", "mai", "juin", "juillet", "août", "septembre", "octobre", "novembre", "décembre"};
    String[] de = new String[]{"januar", "februar", "märz", "april", "mai", "juni", "juli", "august", "september", "oktober", "november", "dezember"};
    String[] en = new String[]{"january", "february", "march", "april", "may", "june", "july", "august", "september", "october", "november", "december"};
    String[] it = new String[]{"gennaio", "febbraio", "marzo", "aprile", "maggio", "giugno", "luglio", "agosto", "settembre", "ottobre", "novembre", "dicembre"};
    String[] sp = new String[]{"enero", "febrero", "marzo", "abril", "mayo", "junio", "julio", "agosto", "septiembre", "octubre", "noviembre", "diciembre"};
    String[] de_abbrev = new String[]{"jan.", "feb.", "märz", "apr.", "mai", "jun.", "jul.", "aug.", "sept.", "okt.", "nov.", "dez."};
    String[] en_abbrev = new String[]{"jan.", "feb.", "mar.", "apr.", "may", "jun.", "jul.", "aug.", "sep.", "oct.", "nov.", "dec."};
    // Portuguese names are capitalized here; toLowerCase() below normalizes them.
    String[] port = new String[]{"Janeiro", "Fevereiro", "Março", "Abril", "Maio", "Junho", "Julho", "Agosto", "Setembro", "Outubro", "Novembro", "Dezembro"};
    String[] rom_num = new String[]{"i", "ii", "iii", "iv", "v", "vi", "vii", "viii", "ix", "x", "xi", "xii"};

    String[][] perLang = new String[][]{ck, de, fr, en, it, sp, port, de_abbrev, en_abbrev, rom_num};

    for (String[] months: perLang) {
        for(int m = 1; m < 13; m++){
            monthFromNameMap.put(months[m - 1].toLowerCase(), m);
        }
    }

    // special cases
    monthFromNameMap.put("mar", 3);
    monthFromNameMap.put("dec", 12);
    monthFromNameMap.put("februari", 2);
    monthFromNameMap.put("març", 3);
}
|
186
|
|
187
|
|
188
|
// Renders only the year of a parsed publication date (used in reference titles).
DateTimeFormatter formatterYear = DateTimeFormat.forPattern("yyyy");

// Cache of Collection entities; presumably keyed by collection code — verify against usage.
private Map<String, Collection> collectionMap = new HashMap<>();

// Lazily created extension type for IAPT registration data (initialized elsewhere in this class).
private ExtensionType extensionTypeIAPTRegData = null;

// Title caches of all names imported so far; a failed add() in makeTaxon()
// signals a duplicate registration.
private Set<String> nameSet = new HashSet<>();
// Lazily created term marking duplicate registrations — TODO confirm where it is initialized.
private DefinedTermBase duplicateRegistration = null;
|
196
|
|
197
|
enum TypesName {
|
198
|
fieldUnit, holotype, isotype;
|
199
|
|
200
|
public SpecimenTypeDesignationStatus status(){
|
201
|
switch (this) {
|
202
|
case holotype:
|
203
|
return SpecimenTypeDesignationStatus.HOLOTYPE();
|
204
|
case isotype:
|
205
|
return SpecimenTypeDesignationStatus.ISOTYPE();
|
206
|
default:
|
207
|
return null;
|
208
|
}
|
209
|
}
|
210
|
}
|
211
|
|
212
|
// Lazily created marker type flagging fossil taxa (see markerTypeFossil() accessor — TODO confirm).
private MarkerType markerTypeFossil = null;
// Lazily created rank for unranked supra-generic taxa — TODO confirm initialization site.
private Rank rankUnrankedSupraGeneric = null;
// Lazily created rank for families of uncertain placement — TODO confirm initialization site.
private Rank familyIncertisSedis = null;
// Lazily created annotation type for the 'Caveats' column (see annotationTypeCaveats() accessor — TODO confirm).
private AnnotationType annotationTypeCaveats = null;

// Singleton reference for the hard-to-parse book
// "Las variedades tradicionales de frutales ..." (see makeTaxon()).
private Reference bookVariedadesTradicionales = null;
|
218
|
|
219
|
/**
 * HACK for unit simple testing.
 * True when the JVM was started with the system property {@code TEST_MODE}
 * set (any value).
 */
boolean _testMode = System.getProperty("TEST_MODE") != null;
|
223
|
|
224
|
/**
 * Creates and persists a {@link Taxon} from one row of the IAPT registration
 * file: parses the name and its nomenclatural reference, attaches the
 * original data strings as a technical annotation, records notes/caveats,
 * creates misspelling relations, replaced synonyms, the basionym and type
 * designations, and hangs the taxon into the classification.
 *
 * @param record          one input row as column-name -> value map
 * @param state           the current import state
 * @param higherTaxonNode node to attach the new taxon to; for species and
 *                        below it is replaced by the node of the genus if
 *                        that was imported before
 * @param isFossil        whether the row is flagged as a fossil record
 * @return the newly created (and saved) taxon
 */
private Taxon makeTaxon(HashMap<String, String> record, SimpleExcelTaxonImportState<CONFIG> state,
        TaxonNode higherTaxonNode, boolean isFossil) {

    String regNumber = getValue(record, REGISTRATIONNO_PK, false);
    String regStr = getValue(record, REGISTRATION, true);
    String titleCacheStr = getValue(record, FULLNAME, true);
    String nameStr = getValue(record, NAMESTRING, true);
    String authorStr = getValue(record, AUTHORSTRING, true);
    String nomRefStr = getValue(record, LITSTRING, true);
    String authorsSpelling = getValue(record, AUTHORSSPELLING, true);
    String notesTxt = getValue(record, NOTESTXT, true);
    String caveats = getValue(record, CAVEATS, true);
    String fullSynSubstStr = getValue(record, FULLSYNSUBST, true);
    String fullBasionymStr = getValue(record, FULLBASIONYM, true);
    // NOTE(review): this also reads FULLBASIONYM although a BASIONYMSTRING
    // column constant exists and is otherwise unused — looks like a
    // copy-paste slip; confirm against the source data before changing.
    String basionymNameStr = getValue(record, FULLBASIONYM, true);
    String synSubstStr = getValue(record, SYNSUBSTSTR, true);
    String typeStr = getValue(record, TYPE, true);

    String nomRefTitle = null;
    String nomRefDetail;
    String nomRefPupDate = null;
    String nomRefIssue = null;
    Partial pupDate = null;

    boolean restoreOriginalReference = false;
    boolean nameIsValid = true;

    // preprocess nomRef: separate citation, reference detail, publishing date
    if(!StringUtils.isEmpty(nomRefStr)){
        nomRefStr = nomRefStr.trim();

        // handle the special case which is hard to parse:
        //
        // Las variedades tradicionales de frutales de la Cuenca del Río Segura. Catálogo Etnobotánico (1): Frutos secos, oleaginosos, frutales de hueso, almendros y frutales de pepita: 154. 1997.
        if(nomRefStr.startsWith("Las variedades tradicionales de frutales ")){

            // lazily create the shared book reference once and reuse it for
            // every row citing this work
            if(bookVariedadesTradicionales == null){
                bookVariedadesTradicionales = ReferenceFactory.newBook();
                bookVariedadesTradicionales.setTitle("Las variedades tradicionales de frutales de la Cuenca del Río Segura. Catálogo Etnobotánico (1): Frutos secos, oleaginosos, frutales de hueso, almendros y frutales de pepita");
                bookVariedadesTradicionales.setDatePublished(TimePeriod.NewInstance(1997));
                getReferenceService().save(bookVariedadesTradicionales);
            }
            // shorten the citation so the tokenizer below can handle it;
            // the real reference is restored after parsing
            nomRefStr = nomRefStr.replaceAll("^.*?\\:.*?\\:", "Las variedades tradicionales:");
            restoreOriginalReference = true;
        }

        Matcher m = nomRefTokenizeP.matcher(nomRefStr);
        if(m.matches()){
            nomRefTitle = m.group("title");
            nomRefDetail = m.group("detail");
            nomRefPupDate = m.group("date").trim();
            nomRefIssue = m.group("issue");

            // rebuild a normalized citation "<title>: <detail>. <year>."
            pupDate = parseDate(regNumber, nomRefPupDate);
            if (pupDate != null) {
                nomRefTitle = nomRefTitle + ": " + nomRefDetail + ". " + pupDate.toString(formatterYear) + ".";
            } else {
                logger.warn(csvReportLine(regNumber, "Pub date", nomRefPupDate, "in", nomRefStr, "not parsable"));
            }
        } else {
            // not tokenizable — pass the whole string on to the name parser
            nomRefTitle = nomRefStr;
        }
    }

    BotanicalName taxonName = makeBotanicalName(state, regNumber, titleCacheStr, nameStr, authorStr, nomRefTitle);

    // always add the original strings of parsed data as annotation
    taxonName.addAnnotation(Annotation.NewInstance("imported and parsed data strings:" +
            "\n - '" + LITSTRING + "': " + nomRefStr +
            "\n - '" + TYPE + "': " + typeStr +
            "\n - '" + REGISTRATION + "': " + regStr
            , AnnotationType.TECHNICAL(), Language.DEFAULT()));

    if(restoreOriginalReference){
        taxonName.setNomenclaturalReference(bookVariedadesTradicionales);
    }

    // transfer parsed date/issue onto whichever reference the name ended up with
    if(taxonName.getNomenclaturalReference() != null){
        if(pupDate != null) {
            taxonName.getNomenclaturalReference().setDatePublished(TimePeriod.NewInstance(pupDate));
        }
        if(nomRefIssue != null) {
            ((Reference)taxonName.getNomenclaturalReference()).setVolume(nomRefIssue);
        }
    }

    // notes and caveats become annotations and make the name non-valid
    if(!StringUtils.isEmpty(notesTxt)){
        notesTxt = notesTxt.replace("Notes: ", "").trim();
        taxonName.addAnnotation(Annotation.NewInstance(notesTxt, AnnotationType.EDITORIAL(), Language.DEFAULT()));
        nameIsValid = false;
    }
    if(!StringUtils.isEmpty(caveats)){
        caveats = caveats.replace("Caveats: ", "").trim();
        taxonName.addAnnotation(Annotation.NewInstance(caveats, annotationTypeCaveats(), Language.DEFAULT()));
        nameIsValid = false;
    }

    if(nameIsValid){
        // Status is always considered valid if no notes and cavets are set
        taxonName.addStatus(NomenclaturalStatus.NewInstance(NomenclaturalStatusType.VALID()));
    }

    getNameService().save(taxonName);

    // Namerelations: build the misspelled variant by substituting the trailing
    // tokens of the name with the author's spelling, then relate it as a
    // MISSPELLING of the accepted name.
    if(!StringUtils.isEmpty(authorsSpelling)){
        authorsSpelling = authorsSpelling.replaceFirst("Author's spelling:", "").replaceAll("\"", "").trim();

        String[] authorSpellingTokens = StringUtils.split(authorsSpelling, " ");
        String[] nameStrTokens = StringUtils.split(nameStr, " ");

        // reverse so the substitution is right-aligned (epithets first)
        ArrayUtils.reverse(authorSpellingTokens);
        ArrayUtils.reverse(nameStrTokens);

        for (int i = 0; i < nameStrTokens.length; i++){
            if(i < authorSpellingTokens.length){
                nameStrTokens[i] = authorSpellingTokens[i];
            }
        }
        ArrayUtils.reverse(nameStrTokens);

        String misspelledNameStr = StringUtils.join(nameStrTokens, ' ');
        // build the fullnameString of the misspelled name
        misspelledNameStr = taxonName.getTitleCache().replace(nameStr, misspelledNameStr);

        TaxonNameBase misspelledName = nameParser.parseReferencedName(misspelledNameStr, NomenclaturalCode.ICNAFP, null);
        misspelledName.addRelationshipToName(taxonName, NameRelationshipType.MISSPELLING(), null);
        getNameService().save(misspelledName);
    }

    // Replaced Synonyms
    if(!StringUtils.isEmpty(fullSynSubstStr)){
        fullSynSubstStr = fullSynSubstStr.replace("Syn. subst.: ", "");
        BotanicalName replacedSynonymName = makeBotanicalName(state, regNumber, fullSynSubstStr, synSubstStr, null, null);
        replacedSynonymName.addReplacedSynonym(taxonName, null, null, null);
        getNameService().save(replacedSynonymName);
    }

    Reference sec = state.getConfig().getSecReference();
    Taxon taxon = Taxon.NewInstance(taxonName, sec);

    // Basionym: create the basionym name, link it, and attach it as a
    // homotypic synonym of the new taxon
    if(fullBasionymStr != null){
        fullBasionymStr = fullBasionymStr.replaceAll("^\\w*:\\s", ""); // Strip off the leading 'Basionym: "
        basionymNameStr = basionymNameStr.replaceAll("^\\w*:\\s", ""); // Strip off the leading 'Basionym: "
        BotanicalName basionym = makeBotanicalName(state, regNumber, fullBasionymStr, basionymNameStr, null, null);
        getNameService().save(basionym);
        taxonName.addBasionym(basionym);

        Synonym syn = Synonym.NewInstance(basionym, sec);
        taxon.addSynonym(syn, SynonymType.HOMOTYPIC_SYNONYM_OF());
        getTaxonService().save(syn);
    }

    // Markers
    if(isFossil){
        taxon.addMarker(Marker.NewInstance(markerTypeFossil(), true));
    }
    // nameSet.add() returns false if the title cache was seen before
    if(!nameSet.add(titleCacheStr)){
        taxonName.addMarker(Marker.NewInstance(markerDuplicateRegistration(), true));
        logger.warn(csvReportLine(regNumber, "Duplicate registration of", titleCacheStr));
    }

    // Types: specimen types for species rank and below, name types above
    if(!StringUtils.isEmpty(typeStr)){

        if(taxonName.getRank().isSpecies() || taxonName.getRank().isLower(Rank.SPECIES())) {
            // NOTE(review): isFossil is passed as literal false here even
            // though this method's isFossil flag is available — confirm
            // whether fossil specimen types were intended.
            makeSpecimenTypeData(typeStr, taxonName, regNumber, state, false);
        } else {
            makeNameTypeData(typeStr, taxonName, regNumber, state);
        }
    }

    getTaxonService().save(taxon);

    if(taxonName.getRank().equals(Rank.SPECIES()) || taxonName.getRank().isLower(Rank.SPECIES())){
        // try to find the genus, it should have been imported already, Genera are coming first in the import file
        Taxon genus = ((IAPTImportState)state).getGenusTaxonMap().get(taxonName.getGenusOrUninomial());
        if(genus != null){
            higherTaxonNode = genus.getTaxonNodes().iterator().next();
        } else {
            logger.info(csvReportLine(regNumber, "Parent genus not found for", nameStr));
        }
    }

    if(higherTaxonNode != null){
        higherTaxonNode.addChildTaxon(taxon, null, null);
        getTaxonNodeService().save(higherTaxonNode);
    }

    // remember genera so later species rows can find their parent
    if(taxonName.getRank().isGenus()){
        ((IAPTImportState)state).getGenusTaxonMap().put(taxonName.getGenusOrUninomial(), taxon);
    }

    return taxon;
}
|
424
|
|
425
|
/**
 * Parses the 'Type' column for species-rank names: splits it into field unit,
 * holotype and isotype parts and creates the corresponding specimen type
 * designations on the given name. Unparsable strings are preserved as a
 * FieldUnit with a protected title cache.
 *
 * @param typeStr   the raw content of the 'Type' column
 * @param taxonName the name to attach the type designations to
 * @param regNumber registration number, used for log reporting
 * @param state     the current import state
 * @param isFossil  if true, specimens are created as Fossil instead of
 *                  PreservedSpecimen
 */
private void makeSpecimenTypeData(String typeStr, BotanicalName taxonName, String regNumber, SimpleExcelTaxonImportState<CONFIG> state, boolean isFossil) {

    Matcher m = typeSpecimenSplitPattern.matcher(typeStr);

    if(m.matches()){
        String fieldUnitStr = m.group(TypesName.fieldUnit.name());
        // boolean isFieldUnit = typeStr.matches(".*([°']|\\d+\\s?m\\s|\\d+\\s?km\\s).*"); // check for location or unit m, km // makes no sense!!!!
        FieldUnit fieldUnit = parseFieldUnit(fieldUnitStr, regNumber, state);
        if(fieldUnit == null) {
            // create a field unit with only a titleCache using the fieldUnitStr substring
            logger.warn(csvReportLine(regNumber, "Type: fieldUnitStr can not be parsed", fieldUnitStr));
            fieldUnit = FieldUnit.NewInstance();
            fieldUnit.setTitleCache(fieldUnitStr, true);
            getOccurrenceService().save(fieldUnit);
        }
        // NOTE(review): when the branch above ran, the field unit is saved a
        // second time here — redundant but harmless with the CDM save semantics.
        getOccurrenceService().save(fieldUnit);

        SpecimenOrObservationType specimenType;
        if(isFossil){
            specimenType = SpecimenOrObservationType.Fossil;
        } else {
            specimenType = SpecimenOrObservationType.PreservedSpecimen;
        }

        // all others ..
        addSpecimenTypes(taxonName, fieldUnit, m.group(TypesName.holotype.name()), TypesName.holotype, false, regNumber, specimenType);
        addSpecimenTypes(taxonName, fieldUnit, m.group(TypesName.isotype.name()), TypesName.isotype, true, regNumber, specimenType);

    } else {
        // create a field unit with only a titleCache using the full typeStr
        FieldUnit fieldUnit = FieldUnit.NewInstance();
        fieldUnit.setTitleCache(typeStr, true);
        getOccurrenceService().save(fieldUnit);
        logger.warn(csvReportLine(regNumber, "Type: field 'Type' can not be parsed", typeStr));
    }
    getNameService().save(taxonName);
}
|
462
|
|
463
|
private void makeNameTypeData(String typeStr, BotanicalName taxonName, String regNumber, SimpleExcelTaxonImportState<CONFIG> state) {
|
464
|
|
465
|
String nameStr = typeStr.replaceAll("^Type\\s?\\:\\s?", "");
|
466
|
if(nameStr.isEmpty()) {
|
467
|
return;
|
468
|
}
|
469
|
|
470
|
String basionymNameStr = null;
|
471
|
String noteStr = null;
|
472
|
String agentStr = null;
|
473
|
|
474
|
Matcher m;
|
475
|
|
476
|
if(typeStr.startsWith("not to be indicated")){
|
477
|
// Special case:
|
478
|
// Type: not to be indicated (Art. H.9.1. Tokyo Code); stated parent genera: Hechtia Klotzsch; Deuterocohnia Mez
|
479
|
// FIXME
|
480
|
m = typeNameSpecialSplitPattern.matcher(nameStr);
|
481
|
if(m.matches()){
|
482
|
nameStr = m.group("name");
|
483
|
noteStr = m.group("note");
|
484
|
agentStr = m.group("agent");
|
485
|
// TODO better import of agent?
|
486
|
if(agentStr != null){
|
487
|
noteStr = noteStr + ": " + agentStr;
|
488
|
}
|
489
|
}
|
490
|
} else {
|
491
|
// Generic case
|
492
|
m = typeNameBasionymPattern.matcher(nameStr);
|
493
|
if (m.find()) {
|
494
|
basionymNameStr = m.group("basionymName");
|
495
|
if (basionymNameStr != null) {
|
496
|
nameStr = nameStr.replace(m.group(0), "");
|
497
|
}
|
498
|
}
|
499
|
|
500
|
m = typeNameNotePattern.matcher(nameStr);
|
501
|
if (m.find()) {
|
502
|
noteStr = m.group(1);
|
503
|
if (noteStr != null) {
|
504
|
nameStr = nameStr.replace(m.group(0), "");
|
505
|
}
|
506
|
}
|
507
|
}
|
508
|
|
509
|
BotanicalName typeName = (BotanicalName) nameParser.parseFullName(nameStr, NomenclaturalCode.ICNAFP, null);
|
510
|
|
511
|
if(typeName.isProtectedTitleCache() || typeName.getNomenclaturalReference() != null && typeName.getNomenclaturalReference().isProtectedTitleCache()) {
|
512
|
logger.warn(csvReportLine(regNumber, "NameType not parsable", typeStr, nameStr));
|
513
|
}
|
514
|
|
515
|
if(basionymNameStr != null){
|
516
|
BotanicalName basionymName = (BotanicalName) nameParser.parseFullName(nameStr, NomenclaturalCode.ICNAFP, null);
|
517
|
getNameService().save(basionymName);
|
518
|
typeName.addBasionym(basionymName);
|
519
|
}
|
520
|
|
521
|
|
522
|
NameTypeDesignation nameTypeDesignation = NameTypeDesignation.NewInstance();
|
523
|
nameTypeDesignation.setTypeName(typeName);
|
524
|
getNameService().save(typeName);
|
525
|
|
526
|
if(noteStr != null){
|
527
|
nameTypeDesignation.addAnnotation(Annotation.NewInstance(noteStr, AnnotationType.EDITORIAL(), Language.UNKNOWN_LANGUAGE()));
|
528
|
}
|
529
|
taxonName.addNameTypeDesignation(typeName, null, null, null, null, false);
|
530
|
|
531
|
}
|
532
|
|
533
|
/**
 * Currently only parses the collector, fieldNumber and the collection date.
 *
 * @param fieldUnitStr the field unit part of the 'Type' column
 * @param regNumber    registration number, used for log reporting
 * @param state        the current import state (agent lookup/cache)
 * @return the saved field unit, or null if the fieldUnitStr could not be parsed
 */
protected FieldUnit parseFieldUnit(String fieldUnitStr, String regNumber, SimpleExcelTaxonImportState<CONFIG> state) {

    FieldUnit fieldUnit = null;

    Matcher m1 = collectorPattern.matcher(fieldUnitStr);
    if(m1.matches()){

        String collectorData = m1.group(2); // like ... (leg. Metzeltin, 30. 9. 1996)
        String removal = m1.group(1);
        if(collectorData == null){
            collectorData = m1.group(4); // like ... leg. Metzeltin, 30. 9. 1996
            removal = m1.group(3);
        }
        if(collectorData == null){
            collectorData = m1.group(6); // like ^leg. J. J. Halda 18.3.1997$
            removal = null;
        }
        if(collectorData == null){
            return null;
        }

        // the fieldUnitStr is parsable
        // remove all collectorData from the fieldUnitStr and use the rest as locality
        String locality = null;
        if(removal != null){
            locality = fieldUnitStr.replace(removal, "");
        }

        String collectorStr = null;
        String detailStr = null;
        Partial date = null;
        String fieldNumber = null;

        Matcher m2 = collectionDataPattern.matcher(collectorData);
        if(m2.matches()){
            collectorStr = m2.group("collector");
            detailStr = m2.group("detail");

            // Try to make sense of the detailStr
            if(detailStr != null){
                detailStr = detailStr.trim();
                // 1. try to parse as date
                date = parseDate(regNumber, detailStr);
                if(date == null){
                    // 2. try to parse as number
                    if(collectorsNumber.matcher(detailStr).matches()){
                        fieldNumber = detailStr;
                    }
                }
            }
            if(date == null && fieldNumber == null){
                // detailed parsing not possible, so need fo fallback
                collectorStr = collectorData;
            }
        }

        // fallback: treat the whole collector data as the collector name
        if(collectorStr == null) {
            collectorStr = collectorData;
        }

        fieldUnit = FieldUnit.NewInstance();
        GatheringEvent ge = GatheringEvent.NewInstance();
        if(locality != null){
            ge.setLocality(LanguageString.NewInstance(locality, Language.UNKNOWN_LANGUAGE()));
        }

        // reuse a previously created agent for the same collector string
        TeamOrPersonBase agent = state.getAgentBase(collectorStr);
        if(agent == null) {
            agent = Person.NewTitledInstance(collectorStr);
            getAgentService().save(agent);
            state.putAgentBase(collectorStr, agent);
        }
        ge.setCollector(agent);

        if(date != null){
            ge.setGatheringDate(date);
        }

        getEventBaseService().save(ge);
        fieldUnit.setGatheringEvent(ge);

        if(fieldNumber != null) {
            fieldUnit.setFieldNumber(fieldNumber);
        }
        getOccurrenceService().save(fieldUnit);

    }

    return fieldUnit;
}
|
631
|
|
632
|
protected Partial parseDate(String regNumber, String dateStr) {
|
633
|
|
634
|
Partial pupDate = null;
|
635
|
boolean parseError = false;
|
636
|
|
637
|
String day = null;
|
638
|
String month = null;
|
639
|
String monthName = null;
|
640
|
String year = null;
|
641
|
|
642
|
for(Pattern p : datePatterns){
|
643
|
Matcher m2 = p.matcher(dateStr);
|
644
|
if(m2.matches()){
|
645
|
try {
|
646
|
year = m2.group("year");
|
647
|
} catch (IllegalArgumentException e){
|
648
|
// named capture group not found
|
649
|
}
|
650
|
try {
|
651
|
month = m2.group("month");
|
652
|
} catch (IllegalArgumentException e){
|
653
|
// named capture group not found
|
654
|
}
|
655
|
|
656
|
try {
|
657
|
monthName = m2.group("monthName");
|
658
|
month = monthFromName(monthName, regNumber);
|
659
|
if(month == null){
|
660
|
parseError = true;
|
661
|
}
|
662
|
} catch (IllegalArgumentException e){
|
663
|
// named capture group not found
|
664
|
}
|
665
|
try {
|
666
|
day = m2.group("day");
|
667
|
} catch (IllegalArgumentException e){
|
668
|
// named capture group not found
|
669
|
}
|
670
|
|
671
|
if(year != null){
|
672
|
if (year.length() == 2) {
|
673
|
// it is an abbreviated year from the 19** years
|
674
|
year = "19" + year;
|
675
|
}
|
676
|
break;
|
677
|
} else {
|
678
|
parseError = true;
|
679
|
}
|
680
|
}
|
681
|
}
|
682
|
if(year == null){
|
683
|
parseError = true;
|
684
|
}
|
685
|
List<DateTimeFieldType> types = new ArrayList<>();
|
686
|
List<Integer> values = new ArrayList<>();
|
687
|
if(!parseError) {
|
688
|
types.add(DateTimeFieldType.year());
|
689
|
values.add(Integer.parseInt(year));
|
690
|
if (month != null) {
|
691
|
types.add(DateTimeFieldType.monthOfYear());
|
692
|
values.add(Integer.parseInt(month));
|
693
|
}
|
694
|
if (day != null) {
|
695
|
types.add(DateTimeFieldType.dayOfMonth());
|
696
|
values.add(Integer.parseInt(day));
|
697
|
}
|
698
|
pupDate = new Partial(types.toArray(new DateTimeFieldType[types.size()]), ArrayUtils.toPrimitive(values.toArray(new Integer[values.size()])));
|
699
|
}
|
700
|
return pupDate;
|
701
|
}
|
702
|
|
703
|
private String monthFromName(String monthName, String regNumber) {
|
704
|
|
705
|
Integer month = monthFromNameMap.get(monthName.toLowerCase());
|
706
|
if(month == null){
|
707
|
logger.warn(csvReportLine(regNumber, "Unknown month name", monthName));
|
708
|
return null;
|
709
|
} else {
|
710
|
return month.toString();
|
711
|
}
|
712
|
}
|
713
|
|
714
|
|
715
|
private void addSpecimenTypes(BotanicalName taxonName, FieldUnit fieldUnit, String typeStr, TypesName typeName, boolean multiple, String regNumber, SpecimenOrObservationType specimenType){
|
716
|
|
717
|
if(StringUtils.isEmpty(typeStr)){
|
718
|
return;
|
719
|
}
|
720
|
typeStr = typeStr.trim().replaceAll("\\.$", "");
|
721
|
|
722
|
Collection collection = null;
|
723
|
DerivedUnit specimen = null;
|
724
|
|
725
|
List<DerivedUnit> specimens = new ArrayList<>();
|
726
|
if(multiple){
|
727
|
String[] tokens = typeStr.split("\\s?,\\s?");
|
728
|
for (String t : tokens) {
|
729
|
// command to list all complex parsabel types:
|
730
|
// csvcut -t -c RegistrationNo_Pk,Type iapt.csv | csvgrep -c Type -m "Holotype" | egrep -o 'Holotype:\s([A-Z]*\s)[^.]*?'
|
731
|
// csvcut -t -c RegistrationNo_Pk,Type iapt.csv | csvgrep -c Type -m "Holotype" | egrep -o 'Isotype[^:]*:\s([A-Z]*\s)[^.]*?'
|
732
|
|
733
|
if(!t.isEmpty()){
|
734
|
// trying to parse the string
|
735
|
specimen = parseSpecimenType(fieldUnit, typeName, collection, t, regNumber);
|
736
|
if(specimen != null){
|
737
|
specimens.add(specimen);
|
738
|
} else {
|
739
|
// parsing was not successful make simple specimen
|
740
|
specimens.add(makeSpecimenType(fieldUnit, t, specimenType));
|
741
|
}
|
742
|
}
|
743
|
}
|
744
|
} else {
|
745
|
specimen = parseSpecimenType(fieldUnit, typeName, collection, typeStr, regNumber);
|
746
|
if(specimen != null) {
|
747
|
specimens.add(specimen);
|
748
|
// remember current collection
|
749
|
collection = specimen.getCollection();
|
750
|
} else {
|
751
|
// parsing was not successful make simple specimen
|
752
|
specimens.add(makeSpecimenType(fieldUnit, typeStr, SpecimenOrObservationType.PreservedSpecimen));
|
753
|
}
|
754
|
}
|
755
|
|
756
|
for(DerivedUnit s : specimens){
|
757
|
taxonName.addSpecimenTypeDesignation(s, typeName.status(), null, null, null, false, true);
|
758
|
}
|
759
|
}
|
760
|
|
761
|
private DerivedUnit makeSpecimenType(FieldUnit fieldUnit, String titleCache, SpecimenOrObservationType specimenType) {
|
762
|
DerivedUnit specimen;DerivedUnitFacade facade = DerivedUnitFacade.NewInstance(specimenType, fieldUnit);
|
763
|
facade.setTitleCache(titleCache.trim(), true);
|
764
|
specimen = facade.innerDerivedUnit();
|
765
|
return specimen;
|
766
|
}
|
767
|
|
768
|
/**
|
769
|
*
|
770
|
* @param fieldUnit
|
771
|
* @param typeName
|
772
|
* @param collection
|
773
|
* @param text
|
774
|
* @param regNumber
|
775
|
* @return
|
776
|
*/
|
777
|
protected DerivedUnit parseSpecimenType(FieldUnit fieldUnit, TypesName typeName, Collection collection, String text, String regNumber) {
|
778
|
|
779
|
DerivedUnit specimen = null;
|
780
|
|
781
|
String collectionCode = null;
|
782
|
String collectionTitle = null;
|
783
|
String subCollectionStr = null;
|
784
|
String instituteStr = null;
|
785
|
String accessionNumber = null;
|
786
|
|
787
|
boolean unusualAccessionNumber = false;
|
788
|
|
789
|
text = text.trim();
|
790
|
|
791
|
// 1. For Isotypes often the accession number is noted alone if the
|
792
|
// preceeding entry has a collection code.
|
793
|
if(typeName .equals(TypesName.isotype) && collection != null){
|
794
|
Matcher m = accessionNumberOnlyPattern.matcher(text);
|
795
|
if(m.matches()){
|
796
|
try {
|
797
|
accessionNumber = m.group("accNumber");
|
798
|
specimen = makeSpecimenType(fieldUnit, collection, accessionNumber);
|
799
|
} catch (IllegalArgumentException e){
|
800
|
// match group acc_number not found
|
801
|
}
|
802
|
}
|
803
|
}
|
804
|
|
805
|
//2. try it the 'normal' way
|
806
|
if(specimen == null) {
|
807
|
for (Pattern p : specimenTypePatterns) {
|
808
|
Matcher m = p.matcher(text);
|
809
|
if (m.matches()) {
|
810
|
// collection code or collectionTitle is mandatory
|
811
|
try {
|
812
|
collectionCode = m.group("colCode");
|
813
|
} catch (IllegalArgumentException e){
|
814
|
// match group colCode not found
|
815
|
}
|
816
|
|
817
|
try {
|
818
|
instituteStr = m.group("institute");
|
819
|
} catch (IllegalArgumentException e){
|
820
|
// match group col_name not found
|
821
|
}
|
822
|
|
823
|
try {
|
824
|
subCollectionStr = m.group("subCollection");
|
825
|
} catch (IllegalArgumentException e){
|
826
|
// match group subCollection not found
|
827
|
}
|
828
|
try {
|
829
|
accessionNumber = m.group("accNumber");
|
830
|
|
831
|
// try to improve the accessionNumber
|
832
|
if(accessionNumber!= null) {
|
833
|
accessionNumber = accessionNumber.trim();
|
834
|
Matcher m2 = accessionNumberOnlyPattern.matcher(accessionNumber);
|
835
|
String betterAccessionNumber = null;
|
836
|
if (m2.matches()) {
|
837
|
try {
|
838
|
betterAccessionNumber = m.group("accNumber");
|
839
|
} catch (IllegalArgumentException e) {
|
840
|
// match group acc_number not found
|
841
|
}
|
842
|
}
|
843
|
if (betterAccessionNumber != null) {
|
844
|
accessionNumber = betterAccessionNumber;
|
845
|
} else {
|
846
|
unusualAccessionNumber = true;
|
847
|
}
|
848
|
}
|
849
|
|
850
|
} catch (IllegalArgumentException e){
|
851
|
// match group acc_number not found
|
852
|
}
|
853
|
|
854
|
if(collectionCode == null && instituteStr == null){
|
855
|
logger.warn(csvReportLine(regNumber, "Type: neither 'collectionCode' nor 'institute' found in ", text));
|
856
|
continue;
|
857
|
}
|
858
|
collection = getCollection(collectionCode, instituteStr, subCollectionStr);
|
859
|
specimen = makeSpecimenType(fieldUnit, collection, accessionNumber);
|
860
|
break;
|
861
|
}
|
862
|
}
|
863
|
}
|
864
|
if(specimen == null) {
|
865
|
logger.warn(csvReportLine(regNumber, "Type: Could not parse specimen", typeName.name().toString(), text));
|
866
|
}
|
867
|
if(unusualAccessionNumber){
|
868
|
logger.warn(csvReportLine(regNumber, "Type: Unusual accession number", typeName.name().toString(), text, accessionNumber));
|
869
|
}
|
870
|
return specimen;
|
871
|
}
|
872
|
|
873
|
private DerivedUnit makeSpecimenType(FieldUnit fieldUnit, Collection collection, String accessionNumber) {
|
874
|
|
875
|
DerivedUnitFacade facade = DerivedUnitFacade.NewInstance(SpecimenOrObservationType.PreservedSpecimen, fieldUnit);
|
876
|
facade.setCollection(collection);
|
877
|
if(accessionNumber != null){
|
878
|
facade.setAccessionNumber(accessionNumber);
|
879
|
}
|
880
|
return facade.innerDerivedUnit();
|
881
|
}
|
882
|
|
883
|
private BotanicalName makeBotanicalName(SimpleExcelTaxonImportState<CONFIG> state, String regNumber, String titleCacheStr, String nameStr,
|
884
|
String authorStr, String nomRefTitle) {
|
885
|
|
886
|
BotanicalName taxonName;// cache field for the taxonName.titleCache
|
887
|
String taxonNameTitleCache = null;
|
888
|
Map<String, AnnotationType> nameAnnotations = new HashMap<>();
|
889
|
|
890
|
// TitleCache preprocessing
|
891
|
if(titleCacheStr.endsWith(ANNOTATION_MARKER_STRING) || (authorStr != null && authorStr.endsWith(ANNOTATION_MARKER_STRING))){
|
892
|
nameAnnotations.put("Author abbreviation not checked.", AnnotationType.EDITORIAL());
|
893
|
titleCacheStr = titleCacheStr.replace(ANNOTATION_MARKER_STRING, "").trim();
|
894
|
if(authorStr != null) {
|
895
|
authorStr = authorStr.replace(ANNOTATION_MARKER_STRING, "").trim();
|
896
|
}
|
897
|
}
|
898
|
|
899
|
// parse the full taxon name
|
900
|
if(!StringUtils.isEmpty(nomRefTitle)){
|
901
|
String referenceSeparator = nomRefTitle.startsWith("in ") ? " " : ", ";
|
902
|
String taxonFullNameStr = titleCacheStr + referenceSeparator + nomRefTitle;
|
903
|
logger.debug(":::::" + taxonFullNameStr);
|
904
|
taxonName = (BotanicalName) nameParser.parseReferencedName(taxonFullNameStr, NomenclaturalCode.ICNAFP, null);
|
905
|
} else {
|
906
|
taxonName = (BotanicalName) nameParser.parseFullName(titleCacheStr, NomenclaturalCode.ICNAFP, null);
|
907
|
}
|
908
|
|
909
|
taxonNameTitleCache = taxonName.getTitleCache().trim();
|
910
|
if (taxonName.isProtectedTitleCache()) {
|
911
|
logger.warn(csvReportLine(regNumber, "Name could not be parsed", titleCacheStr));
|
912
|
} else {
|
913
|
|
914
|
boolean doRestoreTitleCacheStr = false;
|
915
|
|
916
|
// Check if titleCache and nameCache are plausible
|
917
|
String titleCacheCompareStr = titleCacheStr;
|
918
|
String nameCache = taxonName.getNameCache();
|
919
|
String nameCompareStr = nameStr;
|
920
|
if(taxonName.isBinomHybrid()){
|
921
|
titleCacheCompareStr = titleCacheCompareStr.replace(" x ", " ×");
|
922
|
nameCompareStr = nameCompareStr.replace(" x ", " ×");
|
923
|
}
|
924
|
if(taxonName.isMonomHybrid()){
|
925
|
titleCacheCompareStr = titleCacheCompareStr.replaceAll("^X ", "× ");
|
926
|
nameCompareStr = nameCompareStr.replace("^X ", "× ");
|
927
|
}
|
928
|
if(authorStr != null && authorStr.contains(" et ")){
|
929
|
titleCacheCompareStr = titleCacheCompareStr.replaceAll(" et ", " & ");
|
930
|
}
|
931
|
if (!taxonNameTitleCache.equals(titleCacheCompareStr)) {
|
932
|
logger.warn(csvReportLine(regNumber, "The generated titleCache differs from the imported string", taxonNameTitleCache, " != ", titleCacheStr, " ==> original titleCacheStr has been restored"));
|
933
|
doRestoreTitleCacheStr = true;
|
934
|
}
|
935
|
if (!nameCache.trim().equals(nameCompareStr)) {
|
936
|
logger.warn(csvReportLine(regNumber, "The parsed nameCache differs from field '" + NAMESTRING + "'", nameCache, " != ", nameCompareStr));
|
937
|
}
|
938
|
|
939
|
// Author
|
940
|
//nameParser.handleAuthors(taxonName, titleCacheStr, authorStr);
|
941
|
//if (!titleCacheStr.equals(taxonName.getTitleCache())) {
|
942
|
// logger.warn(regNumber + ": titleCache has changed after setting authors, will restore original titleCacheStr");
|
943
|
// doRestoreTitleCacheStr = true;
|
944
|
//}
|
945
|
|
946
|
if(doRestoreTitleCacheStr){
|
947
|
taxonName.setTitleCache(titleCacheStr, true);
|
948
|
}
|
949
|
|
950
|
// deduplicate
|
951
|
replaceAuthorNamesAndNomRef(state, taxonName);
|
952
|
}
|
953
|
|
954
|
// Annotations
|
955
|
if(!nameAnnotations.isEmpty()){
|
956
|
for(String text : nameAnnotations.keySet()){
|
957
|
taxonName.addAnnotation(Annotation.NewInstance(text, nameAnnotations.get(text), Language.DEFAULT()));
|
958
|
}
|
959
|
}
|
960
|
|
961
|
taxonName.addSource(OriginalSourceType.Import, regNumber, null, state.getConfig().getSourceReference(), null);
|
962
|
|
963
|
getNameService().save(taxonName);
|
964
|
|
965
|
return taxonName;
|
966
|
}
|
967
|
|
968
|
/**
|
969
|
* @param state
|
970
|
* @return
|
971
|
*/
|
972
|
private TaxonNode getClassificationRootNode(IAPTImportState state) {
|
973
|
|
974
|
// Classification classification = state.getClassification();
|
975
|
// if (classification == null){
|
976
|
// IAPTImportConfigurator config = state.getConfig();
|
977
|
// classification = Classification.NewInstance(state.getConfig().getClassificationName());
|
978
|
// classification.setUuid(config.getClassificationUuid());
|
979
|
// classification.setReference(config.getSecReference());
|
980
|
// classification = getClassificationService().find(state.getConfig().getClassificationUuid());
|
981
|
// }
|
982
|
TaxonNode rootNode = state.getRootNode();
|
983
|
if (rootNode == null){
|
984
|
rootNode = getTaxonNodeService().find(ROOT_UUID);
|
985
|
}
|
986
|
if (rootNode == null){
|
987
|
Classification classification = state.getClassification();
|
988
|
if (classification == null){
|
989
|
Reference sec = state.getSecReference();
|
990
|
String classificationName = state.getConfig().getClassificationName();
|
991
|
Language language = Language.DEFAULT();
|
992
|
classification = Classification.NewInstance(classificationName, sec, language);
|
993
|
state.setClassification(classification);
|
994
|
classification.setUuid(state.getConfig().getClassificationUuid());
|
995
|
classification.getRootNode().setUuid(ROOT_UUID);
|
996
|
getClassificationService().save(classification);
|
997
|
}
|
998
|
rootNode = classification.getRootNode();
|
999
|
state.setRootNode(rootNode);
|
1000
|
}
|
1001
|
return rootNode;
|
1002
|
}
|
1003
|
|
1004
|
private Collection getCollection(String collectionCode, String instituteStr, String subCollectionStr){
|
1005
|
|
1006
|
Collection superCollection = null;
|
1007
|
if(subCollectionStr != null){
|
1008
|
superCollection = getCollection(collectionCode, instituteStr, null);
|
1009
|
collectionCode = subCollectionStr;
|
1010
|
instituteStr = null;
|
1011
|
}
|
1012
|
|
1013
|
final String key = collectionCode + "-#i:" + StringUtils.defaultString(instituteStr);
|
1014
|
|
1015
|
Collection collection = collectionMap.get(key);
|
1016
|
|
1017
|
if(collection == null) {
|
1018
|
collection = Collection.NewInstance();
|
1019
|
collection.setCode(collectionCode);
|
1020
|
if(instituteStr != null){
|
1021
|
collection.setInstitute(Institution.NewNamedInstance(instituteStr));
|
1022
|
}
|
1023
|
if(superCollection != null){
|
1024
|
collection.setSuperCollection(superCollection);
|
1025
|
}
|
1026
|
collectionMap.put(key, collection);
|
1027
|
if(!_testMode) {
|
1028
|
getCollectionService().save(collection);
|
1029
|
}
|
1030
|
}
|
1031
|
|
1032
|
return collection;
|
1033
|
}
|
1034
|
|
1035
|
|
1036
|
/**
|
1037
|
* @param record
|
1038
|
* @param originalKey
|
1039
|
* @param doUnescapeHtmlEntities
|
1040
|
* @return
|
1041
|
*/
|
1042
|
private String getValue(HashMap<String, String> record, String originalKey, boolean doUnescapeHtmlEntities) {
|
1043
|
String value = record.get(originalKey);
|
1044
|
|
1045
|
value = fixCharacters(value);
|
1046
|
|
1047
|
if (! StringUtils.isBlank(value)) {
|
1048
|
if (logger.isDebugEnabled()) {
|
1049
|
logger.debug(originalKey + ": " + value);
|
1050
|
}
|
1051
|
value = CdmUtils.removeDuplicateWhitespace(value.trim()).toString();
|
1052
|
if(doUnescapeHtmlEntities){
|
1053
|
value = StringEscapeUtils.unescapeHtml(value);
|
1054
|
}
|
1055
|
return value.trim();
|
1056
|
}else{
|
1057
|
return null;
|
1058
|
}
|
1059
|
}
|
1060
|
|
1061
|
/**
|
1062
|
* Fixes broken characters.
|
1063
|
* For details see
|
1064
|
* http://dev.e-taxonomy.eu/redmine/issues/6035
|
1065
|
*
|
1066
|
* @param value
|
1067
|
* @return
|
1068
|
*/
|
1069
|
private String fixCharacters(String value) {
|
1070
|
|
1071
|
value = StringUtils.replace(value, "s$K", "š");
|
1072
|
value = StringUtils.replace(value, "n$K", "ň");
|
1073
|
value = StringUtils.replace(value, "e$K", "ě");
|
1074
|
value = StringUtils.replace(value, "r$K", "ř");
|
1075
|
value = StringUtils.replace(value, "c$K", "č");
|
1076
|
value = StringUtils.replace(value, "z$K", "ž");
|
1077
|
value = StringUtils.replace(value, "S>U$K", "Š");
|
1078
|
value = StringUtils.replace(value, "C>U$K", "Č");
|
1079
|
value = StringUtils.replace(value, "R>U$K", "Ř");
|
1080
|
value = StringUtils.replace(value, "Z>U$K", "Ž");
|
1081
|
value = StringUtils.replace(value, "g$K", "ǧ");
|
1082
|
value = StringUtils.replace(value, "s$A", "ś");
|
1083
|
value = StringUtils.replace(value, "n$A", "ń");
|
1084
|
value = StringUtils.replace(value, "c$A", "ć");
|
1085
|
value = StringUtils.replace(value, "e$E", "ę");
|
1086
|
value = StringUtils.replace(value, "o$H", "õ");
|
1087
|
value = StringUtils.replace(value, "s$C", "ş");
|
1088
|
value = StringUtils.replace(value, "t$C", "ț");
|
1089
|
value = StringUtils.replace(value, "S>U$C", "Ş");
|
1090
|
value = StringUtils.replace(value, "a$O", "å");
|
1091
|
value = StringUtils.replace(value, "A>U$O", "Å");
|
1092
|
value = StringUtils.replace(value, "u$O", "ů");
|
1093
|
value = StringUtils.replace(value, "g$B", "ğ");
|
1094
|
value = StringUtils.replace(value, "g$B", "ĕ");
|
1095
|
value = StringUtils.replace(value, "a$B", "ă");
|
1096
|
value = StringUtils.replace(value, "l$/", "ł");
|
1097
|
value = StringUtils.replace(value, ">i", "ı");
|
1098
|
value = StringUtils.replace(value, "i$U", "ï");
|
1099
|
// Special-cases
|
1100
|
value = StringUtils.replace(value, "ý", "ý");
|
1101
|
value = StringUtils.replace(value, ">L", "Ł"); // corrected rule
|
1102
|
value = StringUtils.replace(value, "E>U$D", "З");
|
1103
|
value = StringUtils.replace(value, "S>U$E", "Ş");
|
1104
|
value = StringUtils.replace(value, "s$E", "ş");
|
1105
|
|
1106
|
value = StringUtils.replace(value, "c$k", "č");
|
1107
|
value = StringUtils.replace(value, " U$K", " Š");
|
1108
|
|
1109
|
value = StringUtils.replace(value, "O>U>!", "Ø");
|
1110
|
value = StringUtils.replace(value, "o>!", "ø");
|
1111
|
value = StringUtils.replace(value, "S$K", "Ŝ");
|
1112
|
value = StringUtils.replace(value, ">l", "ğ");
|
1113
|
|
1114
|
value = StringUtils.replace(value, "§B>i", "ł");
|
1115
|
value = StringUtils.replace(value, "¤", "ń");
|
1116
|
|
1117
|
return value;
|
1118
|
}
|
1119
|
|
1120
|
|
1121
|
/**
|
1122
|
* Stores taxa records in DB
|
1123
|
*/
|
1124
|
@Override
|
1125
|
protected void firstPass(SimpleExcelTaxonImportState<CONFIG> state) {
|
1126
|
|
1127
|
if(excludeFromImport(state)){
|
1128
|
return;
|
1129
|
}
|
1130
|
|
1131
|
String lineNumber = "L#" + state.getCurrentLine() + ": ";
|
1132
|
logger.setLevel(Level.DEBUG);
|
1133
|
HashMap<String, String> record = state.getOriginalRecord();
|
1134
|
logger.debug(lineNumber + record.toString());
|
1135
|
|
1136
|
Set<String> keys = record.keySet();
|
1137
|
for (String key: keys) {
|
1138
|
if (! expectedKeys.contains(key)){
|
1139
|
logger.warn(lineNumber + "Unexpected Key: " + key);
|
1140
|
}
|
1141
|
}
|
1142
|
|
1143
|
String reg_id = record.get(REGISTRATIONNO_PK);
|
1144
|
|
1145
|
//higherTaxon
|
1146
|
String higherTaxaString = record.get(HIGHERTAXON);
|
1147
|
boolean isFossil = false;
|
1148
|
if(higherTaxaString.startsWith("FOSSIL ")){
|
1149
|
higherTaxaString = higherTaxaString.replace("FOSSIL ", "");
|
1150
|
isFossil = true;
|
1151
|
}
|
1152
|
TaxonNode higherTaxon = getHigherTaxon(higherTaxaString, (IAPTImportState)state);
|
1153
|
|
1154
|
//Taxon
|
1155
|
Taxon taxon = makeTaxon(record, state, higherTaxon, isFossil);
|
1156
|
if (taxon == null){
|
1157
|
logger.warn(lineNumber + "taxon could not be created and is null");
|
1158
|
return;
|
1159
|
}
|
1160
|
((IAPTImportState)state).setCurrentTaxon(taxon);
|
1161
|
|
1162
|
// Registration
|
1163
|
IAPTRegData regData = makeIAPTRegData(state);
|
1164
|
ObjectMapper mapper = new ObjectMapper();
|
1165
|
try {
|
1166
|
String regdataJson = mapper.writeValueAsString(regData);
|
1167
|
Extension.NewInstance(taxon.getName(), regdataJson, getExtensionTypeIAPTRegData());
|
1168
|
getNameService().save(taxon.getName());
|
1169
|
} catch (JsonProcessingException e) {
|
1170
|
logger.error("Error on converting IAPTRegData", e);
|
1171
|
}
|
1172
|
|
1173
|
logger.info("#of imported Genera: " + ((IAPTImportState) state).getGenusTaxonMap().size());
|
1174
|
return;
|
1175
|
}
|
1176
|
|
1177
|
private boolean excludeFromImport(SimpleExcelTaxonImportState<CONFIG> state) {
|
1178
|
if(state.getConfig().isDoAlgeaeOnly()){
|
1179
|
boolean include = false;
|
1180
|
String higherTaxon = getValue(state.getOriginalRecord(), HIGHERTAXON, true);
|
1181
|
String fullNameStr = getValue(state.getOriginalRecord(), FULLNAME, true);
|
1182
|
include |= higherTaxon.matches(".*?PHYCEAE(?:$|\\s+)");
|
1183
|
for(String test : new String[]{
|
1184
|
"Bolidophyceae ",
|
1185
|
"Phaeothamniophyceae ",
|
1186
|
"Bolidomonadales ",
|
1187
|
"Bolidomonadaceae ",
|
1188
|
"Aureoumbra ",
|
1189
|
"Bolidomonas ",
|
1190
|
"Seagriefia ",
|
1191
|
"Navicula "
|
1192
|
}) {
|
1193
|
include |= fullNameStr.startsWith(test);
|
1194
|
}
|
1195
|
return !include;
|
1196
|
}
|
1197
|
|
1198
|
return false;
|
1199
|
}
|
1200
|
|
1201
|
private ExtensionType getExtensionTypeIAPTRegData() {
|
1202
|
if(extensionTypeIAPTRegData == null){
|
1203
|
extensionTypeIAPTRegData = ExtensionType.NewInstance("IAPTRegData.json", "IAPTRegData.json", "");
|
1204
|
getTermService().save(extensionTypeIAPTRegData);
|
1205
|
}
|
1206
|
return extensionTypeIAPTRegData;
|
1207
|
}
|
1208
|
|
1209
|
private IAPTRegData makeIAPTRegData(SimpleExcelTaxonImportState<CONFIG> state) {
|
1210
|
|
1211
|
HashMap<String, String> record = state.getOriginalRecord();
|
1212
|
String registrationStr = getValue(record, REGISTRATION);
|
1213
|
String regDateStr = getValue(record, REGDATE);
|
1214
|
String regStr = getValue(record, REGISTRATION, true);
|
1215
|
|
1216
|
String dateStr = null;
|
1217
|
String office = null;
|
1218
|
Integer regID = null;
|
1219
|
Integer formNo = null;
|
1220
|
|
1221
|
Matcher m = registrationPattern.matcher(registrationStr);
|
1222
|
if(m.matches()){
|
1223
|
dateStr = m.group("regdate");
|
1224
|
if(parseDate( regStr, dateStr) == null){
|
1225
|
// check for valid dates
|
1226
|
logger.warn(csvReportLine(regStr, REGISTRATION + ": could not parse date", dateStr, " in ", registrationStr));
|
1227
|
};
|
1228
|
office = m.group("office");
|
1229
|
regID = Integer.valueOf(m.group("regid"));
|
1230
|
try {
|
1231
|
formNo = Integer.valueOf(m.group("formNo"));
|
1232
|
} catch(IllegalArgumentException e){
|
1233
|
// ignore
|
1234
|
}
|
1235
|
} else {
|
1236
|
logger.warn(csvReportLine(regStr, REGISTRATION + ": could not be parsed", registrationStr));
|
1237
|
}
|
1238
|
IAPTRegData regData = new IAPTRegData(dateStr, office, regID, formNo);
|
1239
|
return regData;
|
1240
|
}
|
1241
|
|
1242
|
private TaxonNode getHigherTaxon(String higherTaxaString, IAPTImportState state) {
|
1243
|
String[] higherTaxaNames = higherTaxaString.toLowerCase().replaceAll("[\\[\\]]", "").split(":");
|
1244
|
TaxonNode higherTaxonNode = null;
|
1245
|
|
1246
|
ITaxonTreeNode rootNode = getClassificationRootNode(state);
|
1247
|
for (String htn : higherTaxaNames) {
|
1248
|
htn = StringUtils.capitalize(htn.trim());
|
1249
|
Taxon higherTaxon = state.getHigherTaxon(htn);
|
1250
|
if (higherTaxon != null){
|
1251
|
higherTaxonNode = higherTaxon.getTaxonNodes().iterator().next();
|
1252
|
}else{
|
1253
|
BotanicalName name = makeHigherTaxonName(state, htn);
|
1254
|
Reference sec = state.getSecReference();
|
1255
|
higherTaxon = Taxon.NewInstance(name, sec);
|
1256
|
getTaxonService().save(higherTaxon);
|
1257
|
higherTaxonNode = rootNode.addChildTaxon(higherTaxon, sec, null);
|
1258
|
state.putHigherTaxon(htn, higherTaxon);
|
1259
|
getClassificationService().saveTreeNode(higherTaxonNode);
|
1260
|
}
|
1261
|
rootNode = higherTaxonNode;
|
1262
|
}
|
1263
|
return higherTaxonNode;
|
1264
|
}
|
1265
|
|
1266
|
private BotanicalName makeHigherTaxonName(IAPTImportState state, String name) {
|
1267
|
|
1268
|
Rank rank = guessRank(name);
|
1269
|
|
1270
|
BotanicalName taxonName = BotanicalName.NewInstance(rank);
|
1271
|
taxonName.addSource(makeOriginalSource(state));
|
1272
|
taxonName.setGenusOrUninomial(StringUtils.capitalize(name));
|
1273
|
return taxonName;
|
1274
|
}
|
1275
|
|
1276
|
private Rank guessRank(String name) {
|
1277
|
|
1278
|
// normalize
|
1279
|
name = name.replaceAll("\\(.*\\)", "").trim();
|
1280
|
|
1281
|
if(name.matches("^Plantae$|^Fungi$")){
|
1282
|
return Rank.KINGDOM();
|
1283
|
} else if(name.matches("^Incertae sedis$|^No group assigned$")){
|
1284
|
return rankFamilyIncertisSedis();
|
1285
|
} else if(name.matches(".*phyta$|.*mycota$")){
|
1286
|
return Rank.PHYLUM();
|
1287
|
} else if(name.matches(".*phytina$|.*mycotina$")){
|
1288
|
return Rank.SUBPHYLUM();
|
1289
|
} else if(name.matches("Gymnospermae$|.*ones$")){ // Monocotyledones, Dicotyledones
|
1290
|
return rankUnrankedSupraGeneric();
|
1291
|
} else if(name.matches(".*opsida$|.*phyceae$|.*mycetes$|.*ones$|^Musci$|^Hepaticae$")){
|
1292
|
return Rank.CLASS();
|
1293
|
} else if(name.matches(".*idae$|.*phycidae$|.*mycetidae$")){
|
1294
|
return Rank.SUBCLASS();
|
1295
|
} else if(name.matches(".*ales$")){
|
1296
|
return Rank.ORDER();
|
1297
|
} else if(name.matches(".*ineae$")){
|
1298
|
return Rank.SUBORDER();
|
1299
|
} else if(name.matches(".*aceae$")){
|
1300
|
return Rank.FAMILY();
|
1301
|
} else if(name.matches(".*oideae$")){
|
1302
|
return Rank.SUBFAMILY();
|
1303
|
} else
|
1304
|
// if(name.matches(".*eae$")){
|
1305
|
// return Rank.TRIBE();
|
1306
|
// } else
|
1307
|
if(name.matches(".*inae$")){
|
1308
|
return Rank.SUBTRIBE();
|
1309
|
} else if(name.matches(".*ae$")){
|
1310
|
return Rank.FAMILY();
|
1311
|
}
|
1312
|
return Rank.UNKNOWN_RANK();
|
1313
|
}
|
1314
|
|
1315
|
private Rank rankUnrankedSupraGeneric() {
|
1316
|
|
1317
|
if(rankUnrankedSupraGeneric == null){
|
1318
|
rankUnrankedSupraGeneric = Rank.NewInstance(RankClass.Suprageneric, "Unranked supra generic", " ", " ");
|
1319
|
getTermService().save(rankUnrankedSupraGeneric);
|
1320
|
}
|
1321
|
return rankUnrankedSupraGeneric;
|
1322
|
}
|
1323
|
|
1324
|
private Rank rankFamilyIncertisSedis() {
|
1325
|
|
1326
|
if(familyIncertisSedis == null){
|
1327
|
familyIncertisSedis = Rank.NewInstance(RankClass.Suprageneric, "Family incertis sedis", " ", " ");
|
1328
|
getTermService().save(familyIncertisSedis);
|
1329
|
}
|
1330
|
return familyIncertisSedis;
|
1331
|
}
|
1332
|
|
1333
|
private AnnotationType annotationTypeCaveats(){
|
1334
|
if(annotationTypeCaveats == null){
|
1335
|
annotationTypeCaveats = AnnotationType.NewInstance("Caveats", "Caveats", "");
|
1336
|
getTermService().save(annotationTypeCaveats);
|
1337
|
}
|
1338
|
return annotationTypeCaveats;
|
1339
|
}
|
1340
|
|
1341
|
|
1342
|
/**
|
1343
|
* @param state
|
1344
|
* @return
|
1345
|
*/
|
1346
|
private IdentifiableSource makeOriginalSource(IAPTImportState state) {
|
1347
|
return IdentifiableSource.NewDataImportInstance("line: " + state.getCurrentLine(), null, state.getConfig().getSourceReference());
|
1348
|
}
|
1349
|
|
1350
|
|
1351
|
private Reference makeReference(IAPTImportState state, UUID uuidRef) {
|
1352
|
Reference ref = state.getReference(uuidRef);
|
1353
|
if (ref == null){
|
1354
|
ref = getReferenceService().find(uuidRef);
|
1355
|
state.putReference(uuidRef, ref);
|
1356
|
}
|
1357
|
return ref;
|
1358
|
}
|
1359
|
|
1360
|
private MarkerType markerTypeFossil(){
|
1361
|
if(this.markerTypeFossil == null){
|
1362
|
markerTypeFossil = MarkerType.NewInstance("isFossilTaxon", "isFossil", null);
|
1363
|
getTermService().save(this.markerTypeFossil);
|
1364
|
}
|
1365
|
return markerTypeFossil;
|
1366
|
}
|
1367
|
|
1368
|
private MarkerType markerDuplicateRegistration(){
|
1369
|
if(this.duplicateRegistration == null){
|
1370
|
duplicateRegistration = MarkerType.NewInstance("duplicateRegistration", "duplicateRegistration", null);
|
1371
|
getTermService().save(this.duplicateRegistration);
|
1372
|
}
|
1373
|
return markerTypeFossil;
|
1374
|
}
|
1375
|
|
1376
|
private String csvReportLine(String regId, String message, String ... fields){
|
1377
|
StringBuilder out = new StringBuilder("regID#");
|
1378
|
out.append(regId).append(",\"").append(message).append('"');
|
1379
|
|
1380
|
for(String f : fields){
|
1381
|
out.append(",\"").append(f).append('"');
|
1382
|
}
|
1383
|
return out.toString();
|
1384
|
}
|
1385
|
|
1386
|
|
1387
|
}
|