|
1 |
/**
|
|
2 |
* Copyright (C) 2017 EDIT
|
|
3 |
* European Distributed Institute of Taxonomy
|
|
4 |
* http://www.e-taxonomy.eu
|
|
5 |
*
|
|
6 |
* The contents of this file are subject to the Mozilla Public License Version 1.1
|
|
7 |
* See LICENSE.TXT at the top of this package for the full license terms.
|
|
8 |
*/
|
|
9 |
package eu.etaxonomy.cdm.io.euromed;
|
|
10 |
|
|
11 |
import java.util.HashMap;
|
|
12 |
import java.util.Map;
|
|
13 |
import java.util.Set;
|
|
14 |
import java.util.UUID;
|
|
15 |
import java.util.regex.Matcher;
|
|
16 |
import java.util.regex.Pattern;
|
|
17 |
|
|
18 |
import org.apache.log4j.Logger;
|
|
19 |
import org.springframework.stereotype.Component;
|
|
20 |
import org.springframework.transaction.TransactionStatus;
|
|
21 |
|
|
22 |
import eu.etaxonomy.cdm.io.berlinModel.BerlinModelTransformer;
|
|
23 |
import eu.etaxonomy.cdm.io.common.utils.ImportDeduplicationHelper;
|
|
24 |
import eu.etaxonomy.cdm.io.mexico.SimpleExcelTaxonImport;
|
|
25 |
import eu.etaxonomy.cdm.io.mexico.SimpleExcelTaxonImportState;
|
|
26 |
import eu.etaxonomy.cdm.model.agent.TeamOrPersonBase;
|
|
27 |
import eu.etaxonomy.cdm.model.common.Language;
|
|
28 |
import eu.etaxonomy.cdm.model.common.Marker;
|
|
29 |
import eu.etaxonomy.cdm.model.common.MarkerType;
|
|
30 |
import eu.etaxonomy.cdm.model.common.VerbatimTimePeriod;
|
|
31 |
import eu.etaxonomy.cdm.model.description.Distribution;
|
|
32 |
import eu.etaxonomy.cdm.model.description.Feature;
|
|
33 |
import eu.etaxonomy.cdm.model.description.PresenceAbsenceTerm;
|
|
34 |
import eu.etaxonomy.cdm.model.description.TaxonDescription;
|
|
35 |
import eu.etaxonomy.cdm.model.description.TaxonNameDescription;
|
|
36 |
import eu.etaxonomy.cdm.model.description.TextData;
|
|
37 |
import eu.etaxonomy.cdm.model.location.NamedArea;
|
|
38 |
import eu.etaxonomy.cdm.model.name.Rank;
|
|
39 |
import eu.etaxonomy.cdm.model.name.TaxonName;
|
|
40 |
import eu.etaxonomy.cdm.model.name.TaxonNameFactory;
|
|
41 |
import eu.etaxonomy.cdm.model.reference.IArticle;
|
|
42 |
import eu.etaxonomy.cdm.model.reference.IBook;
|
|
43 |
import eu.etaxonomy.cdm.model.reference.IBookSection;
|
|
44 |
import eu.etaxonomy.cdm.model.reference.ISourceable;
|
|
45 |
import eu.etaxonomy.cdm.model.reference.Reference;
|
|
46 |
import eu.etaxonomy.cdm.model.reference.ReferenceFactory;
|
|
47 |
import eu.etaxonomy.cdm.model.taxon.Taxon;
|
|
48 |
import eu.etaxonomy.cdm.model.taxon.TaxonNode;
|
|
49 |
import eu.etaxonomy.cdm.model.term.TermVocabulary;
|
|
50 |
import eu.etaxonomy.cdm.strategy.parser.NonViralNameParserImpl;
|
|
51 |
import eu.etaxonomy.cdm.strategy.parser.TimePeriodParser;
|
|
52 |
|
|
53 |
/**
|
|
54 |
* @author a.mueller
|
|
55 |
* @since 23.10.2019
|
|
56 |
*/
|
|
57 |
@Component
|
|
58 |
public class IpniImport<CONFIG extends IpniImportConfigurator>
|
|
59 |
extends SimpleExcelTaxonImport<CONFIG> {
|
|
60 |
|
|
61 |
    private static final long serialVersionUID = -6691694003401153408L;

    private static final Logger logger = Logger.getLogger(IpniImport.class);

    //column names of the Excel worksheet handled by this import
    private static final String ID_COL = "EDIT-Genus-Taxon-ID";   //uuid of the pre-existing EDIT genus taxon
    private static final String EDIT_GENUS = "EDIT-Genus";        //full genus title, used for validation only
    private static final String GENUS = "genus";
    private static final String SPECIES = "species";
    private static final String INFRA_SPECIES = "infraspecies";
    private static final String NAMECACHE = "full_name_without_family_and_authors";
    private static final String RANK = "rank";                    //expected values: "spec." or "subsp."
    private static final String AUTHORS = "EMauthors";
    private static final String PTYPE = "PType";                  //publication type: AR, BS or BO
    private static final String YEAR = "publication_year";
    private static final String PUBLICATION = "publication";
    private static final String EM_COLLATION = "EMCollation";     //"volume: detail" or only "detail"
    private static final String REFERENCE_REMARKS = "reference_remarks";
    private static final String EM_GEO = "EM-geo";                //comma separated E+M area ids

    //lazily filled map from E+M area idInVocabulary to NamedArea, see getAreaMap()
    private Map<String,NamedArea> areaMap;

    //lazily created in deduplicationHelper(); author deduplication itself is currently switched off
    private ImportDeduplicationHelper<SimpleExcelTaxonImportState<?>> deduplicationHelper;
    private NonViralNameParserImpl parser = NonViralNameParserImpl.NewInstance();
|
|
84 |
|
|
85 |
|
|
86 |
@Override
|
|
87 |
protected String getWorksheetName(CONFIG config) {
|
|
88 |
return "_11_IPNI_name_w_EM_genus_tax_m2";
|
|
89 |
}
|
|
90 |
|
|
91 |
    //true until the first record has been handled; used to open the transaction lazily
    private boolean isFirst = true;
    //transaction opened in firstPass() and committed in secondPass()
    private TransactionStatus tx = null;
    //cache of already resolved genus nodes, keyed by the genus taxon uuid from ID_COL
    private Map<UUID,TaxonNode> genusNodeMap = new HashMap<>();
|
|
94 |
|
|
95 |
    /**
     * Handles one Excel row: resolves and validates the genus node, creates the
     * taxon name and taxon, attaches the taxon below the genus (or below the
     * matching species for infraspecific names), creates distribution data and
     * adds an import source.
     */
    @Override
    protected void firstPass(SimpleExcelTaxonImportState<CONFIG> state) {
        //open the transaction lazily on the first record; it is committed in secondPass()
        if (isFirst){
            tx = this.startTransaction();
            isFirst = false;
        }
        getAreaMap();  //ensure the area map is initialized

        String line = state.getCurrentLine() + ": ";  //prefix for log messages
        Map<String, String> record = state.getOriginalRecord();

        String genusUuidStr = getValue(record, ID_COL);
        UUID genusUuid = UUID.fromString(genusUuidStr);
        TaxonNode genusNode = genusNodeMap.get(genusUuid);

        if (genusNode == null){
            genusNode = getGenusNode(state, genusUuid);
        }
        if (genusNode == null){
            //genus could not be resolved; getGenusNode() has logged the reason
            return;
        }

        Rank rank = getRank(state);
        TaxonName taxonName = makeName(state, line, rank);
        TaxonNode parent = getParent(state, line, genusNode, taxonName, rank);
        //the new taxon uses the sec reference of its parent taxon
        Reference sec = parent.getTaxon().getSec();
        Taxon taxon = Taxon.NewInstance(taxonName, sec);
        TaxonNode childNode = parent.addChildTaxon(taxon, null, null); //E+M taxon nodes usually do not have a citation
        getTaxonNodeService().saveOrUpdate(childNode);

        makeDistribution(state, line, taxon);
        addImportSource(state, taxon);
    }
|
|
128 |
|
|
129 |
private void addImportSource(SimpleExcelTaxonImportState<CONFIG> state, ISourceable<?> sourceable) {
|
|
130 |
sourceable.addImportSource("row: "+state.getCurrentLine(), "_11_IPNI_name_w_EM_genus_tax_m2", getSourceReference(state), null);
|
|
131 |
}
|
|
132 |
|
|
133 |
private void makeDistribution(SimpleExcelTaxonImportState<CONFIG> state, String line, Taxon taxon) {
|
|
134 |
|
|
135 |
//E+M
|
|
136 |
NamedArea euroMedArea = getAreaMap().get("EM");
|
|
137 |
if (euroMedArea == null){
|
|
138 |
logger.warn("Euro+Med area not found");
|
|
139 |
}
|
|
140 |
TaxonDescription desc = TaxonDescription.NewInstance(taxon);
|
|
141 |
Distribution endemicDistribution = Distribution.NewInstance(euroMedArea, PresenceAbsenceTerm.ENDEMIC_FOR_THE_RELEVANT_AREA());
|
|
142 |
desc.addElement(endemicDistribution);
|
|
143 |
|
|
144 |
//single areas
|
|
145 |
Map<String, String> record = state.getOriginalRecord();
|
|
146 |
String allAreaStr = getValue(record, EM_GEO);
|
|
147 |
if(isBlank(allAreaStr)){
|
|
148 |
logger.warn(line+"No distribution data exists.");
|
|
149 |
}else{
|
|
150 |
String[] areaSplit = allAreaStr.split(",");
|
|
151 |
for (String areaStr: areaSplit){
|
|
152 |
NamedArea area = getAreaMap().get(areaStr.trim());
|
|
153 |
if (area == null){
|
|
154 |
logger.warn(line+"Area could not be recognized: " + areaStr.trim());
|
|
155 |
}else{
|
|
156 |
//all distributions are native and are usually endemic, endemism will be checked later (ERS 2019-10-24)
|
|
157 |
Distribution distribution = Distribution.NewInstance(area, PresenceAbsenceTerm.NATIVE());
|
|
158 |
desc.addElement(distribution);
|
|
159 |
}
|
|
160 |
}
|
|
161 |
}
|
|
162 |
}
|
|
163 |
|
|
164 |
private Map<String, NamedArea> getAreaMap() {
|
|
165 |
if (areaMap == null){
|
|
166 |
makeAreaMap();
|
|
167 |
}
|
|
168 |
return areaMap;
|
|
169 |
}
|
|
170 |
|
|
171 |
private void makeAreaMap() {
|
|
172 |
areaMap = new HashMap<>();
|
|
173 |
@SuppressWarnings("unchecked")
|
|
174 |
TermVocabulary<NamedArea> emAreaVoc = getVocabularyService().find(BerlinModelTransformer.uuidVocEuroMedAreas);
|
|
175 |
for (NamedArea area: emAreaVoc.getTerms()){
|
|
176 |
areaMap.put(area.getIdInVocabulary(), area);
|
|
177 |
}
|
|
178 |
}
|
|
179 |
|
|
180 |
private TaxonNode getParent(SimpleExcelTaxonImportState<CONFIG> state, String line, TaxonNode genusNode,
|
|
181 |
TaxonName taxonName, Rank rank) {
|
|
182 |
if (rank.equals(Rank.SPECIES())){
|
|
183 |
return genusNode;
|
|
184 |
}else{
|
|
185 |
TaxonNode speciesNode = getSpecies(state, line, taxonName, genusNode);
|
|
186 |
if (speciesNode == null){
|
|
187 |
logger.warn(line + "Species for infraspecies not found. Added to genus: " + taxonName.getTitleCache());
|
|
188 |
return genusNode;
|
|
189 |
}else{
|
|
190 |
return speciesNode;
|
|
191 |
}
|
|
192 |
}
|
|
193 |
}
|
|
194 |
|
|
195 |
private TaxonNode getSpecies(SimpleExcelTaxonImportState<CONFIG> state, String line, TaxonName taxonName, TaxonNode genusNode) {
|
|
196 |
String specEpi = taxonName.getSpecificEpithet();
|
|
197 |
TaxonNode result = null;
|
|
198 |
for (TaxonNode child : genusNode.getChildNodes()){
|
|
199 |
Rank childRank = child.getTaxon().getName().getRank();
|
|
200 |
if (childRank.isHigher(Rank.SPECIES())){
|
|
201 |
result = getSpecies(state, line, taxonName, child);
|
|
202 |
}else if (childRank.isHigher(Rank.SPECIES())){
|
|
203 |
//do nothing
|
|
204 |
}else if (childRank.equals(Rank.SPECIES()) && specEpi.equals(child.getTaxon().getName().getSpecificEpithet())){
|
|
205 |
result = child;
|
|
206 |
}
|
|
207 |
if (result != null){
|
|
208 |
return result;
|
|
209 |
}
|
|
210 |
}
|
|
211 |
logger.debug(line+"No species found for subspecies " + taxonName.getTitleCache());
|
|
212 |
return null;
|
|
213 |
}
|
|
214 |
|
|
215 |
    /**
     * Creates the botanical name for the current record: epithets, combination
     * authors, nomenclatural reference, micro reference and name remarks.
     * The computed name cache is validated against the NAMECACHE column.
     */
    private TaxonName makeName(SimpleExcelTaxonImportState<CONFIG> state, String line, Rank rank) {
        TaxonName name = TaxonNameFactory.NewBotanicalInstance(rank);
        Map<String, String> record = state.getOriginalRecord();
        String genusStr = getValue(record, GENUS);
        String speciesStr = getValue(record, SPECIES);
        String infraSpeciesStr = getValue(record, INFRA_SPECIES);
        String nameCache = getValue(record, NAMECACHE);
        name.setGenusOrUninomial(genusStr);
        name.setSpecificEpithet(speciesStr);
        name.setInfraSpecificEpithet(infraSpeciesStr);
        //NOTE(review): throws NPE if the namecache column is empty — confirm the column is mandatory
        if (!nameCache.equals(name.getNameCache())){
            logger.warn(line + "Namecache not equal: " + nameCache +" <-> " + name.getNameCache());
        }
        TeamOrPersonBase<?> authors = getAuthors(state, line);
        //all authors are combination authors, no basionym authors exist, according to ERS 2019-10-24
        name.setCombinationAuthorship(authors);
        Reference ref = getReference(state, line, authors);
        name.setNomenclaturalReference(ref);
        String[] collSplit = getCollationSplit(state, line);
        //index 1 holds the page/detail part of the collation
        name.setNomenclaturalMicroReference(collSplit[1]);
        makeNameRemarks(state, line, name);

        addImportSource(state, name);
        return name;
    }
|
|
240 |
|
|
241 |
@SuppressWarnings("deprecation")
|
|
242 |
private void makeNameRemarks(SimpleExcelTaxonImportState<CONFIG> state, String line, TaxonName name) {
|
|
243 |
Map<String, String> record = state.getOriginalRecord();
|
|
244 |
String remarksStr = getValue(record, REFERENCE_REMARKS);
|
|
245 |
if (isBlank(remarksStr) || remarksStr.equals("[epublished]")||remarksStr.equals("(epublished)")){
|
|
246 |
return;
|
|
247 |
}
|
|
248 |
remarksStr = remarksStr.replace("[epublished]", "").trim();
|
|
249 |
if (remarksStr.startsWith("(as")){
|
|
250 |
remarksStr = remarksStr.substring(1, remarksStr.length()-1);
|
|
251 |
}
|
|
252 |
if (remarksStr.startsWith(";")){
|
|
253 |
TaxonNameDescription desc = TaxonNameDescription.NewInstance(name);
|
|
254 |
TextData textData = TextData.NewInstance(Feature.ADDITIONAL_PUBLICATION());
|
|
255 |
textData.putText(Language.ENGLISH(), remarksStr.substring(1).trim());
|
|
256 |
desc.addElement(textData);
|
|
257 |
}else{
|
|
258 |
if (remarksStr.startsWith(";")){
|
|
259 |
remarksStr = remarksStr.substring(1).trim();
|
|
260 |
}
|
|
261 |
String regExStr = "^,?\\s*as ['\"]([\\-a-z]+)['\"]$";
|
|
262 |
Matcher matcher = Pattern.compile(regExStr).matcher(remarksStr);
|
|
263 |
if (!matcher.matches()){
|
|
264 |
logger.warn("name remark does not match: " + remarksStr);
|
|
265 |
}else{
|
|
266 |
String origSpelling = matcher.group(1);
|
|
267 |
TaxonName origName = TaxonNameFactory.NewBotanicalInstance(name.getRank());
|
|
268 |
getNameService().save(origName);
|
|
269 |
origName.setGenusOrUninomial(name.getGenusOrUninomial());
|
|
270 |
if (name.isSpecies()){
|
|
271 |
origName.setSpecificEpithet(origSpelling);
|
|
272 |
}else{
|
|
273 |
origName.setSpecificEpithet(name.getSpecificEpithet());
|
|
274 |
origName.setInfraSpecificEpithet(origSpelling);
|
|
275 |
}
|
|
276 |
name.addOriginalSpelling(origName, getSourceReference(state), null);
|
|
277 |
}
|
|
278 |
}
|
|
279 |
}
|
|
280 |
|
|
281 |
    /**
     * Creates the nomenclatural reference depending on the PType column:
     * AR = article (in journal), BS = book section (in book), BO = book.
     *
     * @return the new reference or <code>null</code> if the type is not recognized
     */
    private Reference getReference(SimpleExcelTaxonImportState<CONFIG> state, String line,
            TeamOrPersonBase<?> authors) {
        Map<String, String> record = state.getOriginalRecord();
        String pTypeStr = getValue(record, PTYPE);
        Reference result;
        if("AR".equals(pTypeStr)){
            //article in journal
            result = ReferenceFactory.newArticle();
            IArticle article = result;
            Reference journal = getJournal(state, line);
            article.setInJournal(journal);
            String[] collSplit = getCollationSplit(state, line);
            article.setVolume(collSplit[0]);
            article.setDatePublished(getYear(state, line));
            makeReferenceRemarks(state, line, article);
        }else if ("BS".equals(pTypeStr)){
            //book section
            result = ReferenceFactory.newBookSection();
            IBookSection section = result;
            Reference book = getBook(state, line);
            section.setInBook(book);
            String[] collSplit = getCollationSplit(state, line);
            //NOTE(review): volume and date are set on the containing book, not on the section — confirm intended
            book.setVolume(collSplit[0]);
            book.setDatePublished(getYear(state, line));
            //TODO in-authors (where to take them from?)
        }else if ("BO".equals(pTypeStr)){
            //book
            result = getBook(state, line);
            IBook book = result;
            String[] collSplit = getCollationSplit(state, line);
            book.setVolume(collSplit[0]);
            book.setDatePublished(getYear(state, line));
        }else{
            logger.warn(line + "Reference type not recognized: " + pTypeStr);
            return null;
        }
        result.setAuthorship(authors);
        //TODO deduplicate references
        //TODO add source to references
//        addImportSource(state, result);
        return result;
    }
|
|
320 |
|
|
321 |
private void makeReferenceRemarks(SimpleExcelTaxonImportState<CONFIG> state, String line, IArticle article) {
|
|
322 |
Map<String, String> record = state.getOriginalRecord();
|
|
323 |
String remarksStr = getValue(record, REFERENCE_REMARKS);
|
|
324 |
if (isBlank(remarksStr)){
|
|
325 |
return;
|
|
326 |
}
|
|
327 |
if (remarksStr.contains("epublished")){
|
|
328 |
MarkerType epublished = getMarkerType(state, MarkerType.uuidEpublished, "epublished", "epublished", null);
|
|
329 |
article.addMarker(Marker.NewInstance(epublished, true));
|
|
330 |
}
|
|
331 |
|
|
332 |
}
|
|
333 |
|
|
334 |
private String[] getCollationSplit(SimpleExcelTaxonImportState<CONFIG> state, String line) {
|
|
335 |
Map<String, String> record = state.getOriginalRecord();
|
|
336 |
String collationStr = getValue(record, EM_COLLATION);
|
|
337 |
String[] split = collationStr.split(":");
|
|
338 |
if (split.length == 2){
|
|
339 |
split[0] = split[0].trim();
|
|
340 |
split[1] = split[1].trim();
|
|
341 |
return split;
|
|
342 |
}else if (split.length == 1){
|
|
343 |
String[] result = new String[2];
|
|
344 |
result[0] = null;
|
|
345 |
result[1] = split[0].trim();
|
|
346 |
return result;
|
|
347 |
}else{
|
|
348 |
logger.warn(line+"Collation string not recognized: " + collationStr);
|
|
349 |
return new String[2];
|
|
350 |
}
|
|
351 |
}
|
|
352 |
|
|
353 |
private Reference getBook(SimpleExcelTaxonImportState<CONFIG> state, String line) {
|
|
354 |
Map<String, String> record = state.getOriginalRecord();
|
|
355 |
String publicationStr = getValue(record, PUBLICATION);
|
|
356 |
Reference result = ReferenceFactory.newBook();
|
|
357 |
result.setAbbrevTitle(publicationStr);
|
|
358 |
return result;
|
|
359 |
}
|
|
360 |
|
|
361 |
private Reference getJournal(SimpleExcelTaxonImportState<CONFIG> state, String line) {
|
|
362 |
Map<String, String> record = state.getOriginalRecord();
|
|
363 |
String publicationStr = getValue(record, PUBLICATION);
|
|
364 |
Reference result = ReferenceFactory.newJournal();
|
|
365 |
result.setAbbrevTitle(publicationStr);
|
|
366 |
return result;
|
|
367 |
}
|
|
368 |
|
|
369 |
private VerbatimTimePeriod getYear(SimpleExcelTaxonImportState<CONFIG> state, String line) {
|
|
370 |
Map<String, String> record = state.getOriginalRecord();
|
|
371 |
String yearStr = getValue(record, YEAR);
|
|
372 |
VerbatimTimePeriod result = TimePeriodParser.parseStringVerbatim(yearStr);
|
|
373 |
return result;
|
|
374 |
}
|
|
375 |
|
|
376 |
private TeamOrPersonBase<?> getAuthors(SimpleExcelTaxonImportState<CONFIG> state, String line) {
|
|
377 |
Map<String, String> record = state.getOriginalRecord();
|
|
378 |
String authorsStr = getValue(record, AUTHORS);
|
|
379 |
TeamOrPersonBase<?> newAuthor = parser.author(authorsStr);
|
|
380 |
TeamOrPersonBase<?> author = newAuthor; //deduplicationHelper().getExistingAuthor(state, newAuthor);
|
|
381 |
//TODO check parsing + deduplication of authors
|
|
382 |
return author;
|
|
383 |
}
|
|
384 |
|
|
385 |
@SuppressWarnings("unchecked")
|
|
386 |
private ImportDeduplicationHelper<SimpleExcelTaxonImportState<?>> deduplicationHelper() {
|
|
387 |
if (deduplicationHelper == null){
|
|
388 |
deduplicationHelper = (ImportDeduplicationHelper<SimpleExcelTaxonImportState<?>>)ImportDeduplicationHelper.NewInstance(this);
|
|
389 |
}
|
|
390 |
return deduplicationHelper;
|
|
391 |
}
|
|
392 |
|
|
393 |
private Rank getRank(SimpleExcelTaxonImportState<CONFIG> state) {
|
|
394 |
Map<String, String> record = state.getOriginalRecord();
|
|
395 |
String rankStr = getValue(record, RANK);
|
|
396 |
if ("spec.".equals(rankStr)){
|
|
397 |
return Rank.SPECIES();
|
|
398 |
}else if ("subsp.".equals(rankStr)){
|
|
399 |
return Rank.SUBSPECIES();
|
|
400 |
}else{
|
|
401 |
logger.warn("Unknown rank: " + rankStr);
|
|
402 |
return null;
|
|
403 |
}
|
|
404 |
}
|
|
405 |
|
|
406 |
private TaxonNode getGenusNode(SimpleExcelTaxonImportState<CONFIG> state, UUID genusUuid) {
|
|
407 |
Taxon genusTaxon = (Taxon)getTaxonService().find(genusUuid);
|
|
408 |
validateGenus(state, genusTaxon);
|
|
409 |
Set<TaxonNode> nodes = genusTaxon.getTaxonNodes();
|
|
410 |
if (nodes.size()==0){
|
|
411 |
logger.warn("No genus node: " + genusTaxon.getTitleCache());
|
|
412 |
}else if (nodes.size()>1){
|
|
413 |
logger.warn("More than 1 genus node: " + genusTaxon.getTitleCache());
|
|
414 |
}else{
|
|
415 |
TaxonNode result = nodes.iterator().next();
|
|
416 |
genusNodeMap.put(genusUuid, result);
|
|
417 |
return result;
|
|
418 |
}
|
|
419 |
return null;
|
|
420 |
}
|
|
421 |
|
|
422 |
private void validateGenus(SimpleExcelTaxonImportState<CONFIG> state, Taxon genusTaxon) {
|
|
423 |
Map<String, String> record = state.getOriginalRecord();
|
|
424 |
String editGenus = getValue(record, EDIT_GENUS);
|
|
425 |
if (!editGenus.equals(genusTaxon.getName().getTitleCache())){
|
|
426 |
logger.warn("Full genus not equal: " + editGenus +" <-> "+genusTaxon.getName().getTitleCache());
|
|
427 |
}
|
|
428 |
String genus = getValue(record, GENUS);
|
|
429 |
if (!genus.equals(genusTaxon.getName().getNameCache())){
|
|
430 |
logger.warn("Genus not equal: " + genus +" <-> "+genusTaxon.getName().getNameCache());
|
|
431 |
}
|
|
432 |
}
|
|
433 |
|
|
434 |
|
|
435 |
@Override
|
|
436 |
protected void secondPass(SimpleExcelTaxonImportState<CONFIG> state) {
|
|
437 |
if (tx != null){
|
|
438 |
this.commitTransaction(tx);
|
|
439 |
tx = null;
|
|
440 |
}
|
|
441 |
}
|
|
442 |
}
|
//ref #8612 first implementation of E+M IpniImport