/**
 * Copyright (C) 2007 EDIT
 * European Distributed Institute of Taxonomy
 * http://www.e-taxonomy.eu
 *
 * The contents of this file are subject to the Mozilla Public License Version 1.1
 * See LICENSE.TXT at the top of this package for the full license terms.
 */

package eu.etaxonomy.cdm.io.algaterra;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.UUID;

import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.springframework.stereotype.Component;

import eu.etaxonomy.cdm.api.facade.DerivedUnitFacade;
import eu.etaxonomy.cdm.io.algaterra.validation.AlgaTerraSpecimenImportValidator;
import eu.etaxonomy.cdm.io.berlinModel.in.BerlinModelImportConfigurator;
import eu.etaxonomy.cdm.io.berlinModel.in.BerlinModelImportState;
import eu.etaxonomy.cdm.io.common.IOValidator;
import eu.etaxonomy.cdm.io.common.ResultSetPartitioner;
import eu.etaxonomy.cdm.io.common.mapping.UndefinedTransformerMethodException;
import eu.etaxonomy.cdm.model.common.CdmBase;
import eu.etaxonomy.cdm.model.common.DefinedTerm;
import eu.etaxonomy.cdm.model.common.DefinedTermBase;
import eu.etaxonomy.cdm.model.common.Language;
import eu.etaxonomy.cdm.model.common.Marker;
import eu.etaxonomy.cdm.model.common.MarkerType;
import eu.etaxonomy.cdm.model.common.TermType;
import eu.etaxonomy.cdm.model.common.TermVocabulary;
import eu.etaxonomy.cdm.model.description.CategoricalData;
import eu.etaxonomy.cdm.model.description.DescriptionBase;
import eu.etaxonomy.cdm.model.description.Feature;
import eu.etaxonomy.cdm.model.description.MeasurementUnit;
import eu.etaxonomy.cdm.model.description.QuantitativeData;
import eu.etaxonomy.cdm.model.description.State;
import eu.etaxonomy.cdm.model.description.StatisticalMeasure;
import eu.etaxonomy.cdm.model.description.StatisticalMeasurementValue;
import eu.etaxonomy.cdm.model.description.TextData;
import eu.etaxonomy.cdm.model.occurrence.Collection;
import eu.etaxonomy.cdm.model.occurrence.DerivedUnit;
import eu.etaxonomy.cdm.model.occurrence.FieldUnit;
import eu.etaxonomy.cdm.model.occurrence.SpecimenOrObservationBase;
import eu.etaxonomy.cdm.model.occurrence.SpecimenOrObservationType;
import eu.etaxonomy.cdm.model.reference.Reference;

/**
 * @author a.mueller
 * @created 01.09.2012
 */
@Component
public class AlgaTerraEcoFactImport extends AlgaTerraSpecimenImportBase {

    private static final Logger logger = Logger.getLogger(AlgaTerraEcoFactImport.class);

    private static final int modCount = 5000;
    private static final String pluralString = "eco facts";
    private static final String dbTableName = "EcoFact";  //??

    public AlgaTerraEcoFactImport(){
        super(dbTableName, pluralString);
    }

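    /**
     * Returns the EcoFact ids, ordered by DuplicateFk and EcoFactId (so that duplicate
     * records tend to follow the records they reference via DuplicateFk).
     */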
    @Override
    protected String getIdQuery(BerlinModelImportState state) {
        String result = " SELECT EcoFactId " +
                " FROM EcoFact " +
                " ORDER BY EcoFact.DuplicateFk, EcoFact.EcoFactId ";
        return result;
    }

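    /**
     * Returns the record query for one partition: all EcoFact columns plus the TDWG
     * gazetteer data and the UUIDs of the related climate, habitat and life form terms.
     */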
    @Override
    protected String getRecordQuery(BerlinModelImportConfigurator config) {
        String strQuery =
                " SELECT EcoFact.*, EcoFact.EcoFactId as unitId, " +
                    " tg.ID AS GazetteerId, tg.L1Code, tg.L2Code, tg.L3Code, tg.L4Code, tg.Country, tg.ISOCountry, tg.subL4, " +
                    " ec.UUID as climateUuid, eh.UUID as habitatUuid, elf.UUID as lifeFormUuid " +
                " FROM EcoFact " +
                    " LEFT OUTER JOIN TDWGGazetteer tg ON EcoFact.TDWGGazetteerFk = tg.ID " +
                    " LEFT OUTER JOIN EcoClimate ec ON EcoFact.ClimateFk = ec.ClimateId " +
                    " LEFT OUTER JOIN EcoHabitat eh ON EcoFact.HabitatFk = eh.HabitatId " +
                    " LEFT OUTER JOIN EcoLifeForm elf ON EcoFact.LifeFormFk = elf.LifeFormId " +
                " WHERE (EcoFact.EcoFactId IN (" + ID_LIST_TOKEN + ") )" +
                " ORDER BY EcoFact.DuplicateFk, EcoFact.EcoFactId ";
        return strQuery;
    }

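    /**
     * Imports one partition of EcoFact records. Records without a DuplicateFk get a new
     * field unit, which is cached in the partition's field observation map; records with
     * a DuplicateFk reuse the field unit of the record they duplicate, so only the
     * derived unit is created anew.
     */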
    @Override
    public boolean doPartition(ResultSetPartitioner partitioner, BerlinModelImportState bmState) {
        boolean success = true;

        AlgaTerraImportState state = (AlgaTerraImportState)bmState;
        try {
            makeVocabulariesAndFeatures(state);
        } catch (SQLException e1) {
            logger.warn("Exception occurred when trying to create EcoFact vocabularies: " + e1.getMessage());
            e1.printStackTrace();
        }
        Set<SpecimenOrObservationBase> objectsToSave = new HashSet<SpecimenOrObservationBase>();

        //TODO do we still need this map? EcoFacts are not handled separately from Facts.
        //However, they have duplicates on derived unit level. Also check duplicateFk.
        Map<String, FieldUnit> ecoFactFieldObservationMap = (Map<String, FieldUnit>) partitioner.getObjectMap(ECO_FACT_FIELD_OBSERVATION_NAMESPACE);

        ResultSet rs = partitioner.getResultSet();

        try {

            int i = 0;

            //for each eco fact
            while (rs.next()){

                if ((i++ % modCount) == 0 && i != 1){ logger.info(pluralString + " handled: " + (i-1));}

                int ecoFactId = rs.getInt("EcoFactId");
                Integer duplicateFk = nullSafeInt(rs, "DuplicateFk");

                //FIXME RecordBasis is in Fact table, which is not part of the query anymore.
                //Some EcoFacts have multiple RecordBasis types in Fact. Henning will check this.
                //String recordBasis = rs.getString("RecordBasis");
                String recordBasis = "PreservedSpecimen";

                try {

                    //source ref
                    Reference<?> sourceRef = state.getTransactionalSourceReference();

                    //facade
                    SpecimenOrObservationType type = makeDerivedUnitType(recordBasis);

                    DerivedUnitFacade facade;
                    //field observation
                    if (duplicateFk == null){
                        facade = DerivedUnitFacade.NewInstance(type);
                        handleFieldObservationSpecimen(rs, facade, state, partitioner);
                        handleEcoFactSpecificFieldObservation(rs, facade, state);
                        FieldUnit fieldObservation = facade.getFieldUnit(true);
                        ecoFactFieldObservationMap.put(String.valueOf(ecoFactId), fieldObservation);
                    }else{
                        FieldUnit fieldObservation = ecoFactFieldObservationMap.get(String.valueOf(duplicateFk));
                        facade = DerivedUnitFacade.NewInstance(type, fieldObservation);
                    }

                    handleFirstDerivedSpecimen(rs, facade, state, partitioner);
                    handleEcoFactSpecificDerivedUnit(rs, facade, state);

                    DerivedUnit objectToSave = facade.innerDerivedUnit();
                    objectsToSave.add(objectToSave);

                } catch (Exception e) {
                    logger.warn("Exception in ecoFact: ecoFactId " + ecoFactId + ". " + e.getMessage());
                    e.printStackTrace();
                }

            }

            logger.warn("Specimens to save: " + objectsToSave.size());
            getOccurrenceService().save(objectsToSave);

            return success;
        } catch (SQLException e) {
            logger.error("SQLException: " + e);
            return false;
        }
    }

    protected String getDerivedUnitNameSpace(){
        return ECO_FACT_DERIVED_UNIT_NAMESPACE;
    }

    protected String getFieldObservationNameSpace(){
        return ECO_FACT_FIELD_OBSERVATION_NAMESPACE;
    }

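    /**
     * Adds the EcoFact specific field observation data to the facade: an alkalinity
     * marker, habitat explanation, community and additional data as text data, climate,
     * habitat and life form as categorical data, and the measured parameters (P1-P10).
     */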
    private void handleEcoFactSpecificFieldObservation(ResultSet rs, DerivedUnitFacade facade, AlgaTerraImportState state) throws SQLException {

        //alkalinity marker; getBoolean() returns false for SQL NULL, so use wasNull() to distinguish
        boolean alkFlag = rs.getBoolean("AlkalinityFlag");
        if (! rs.wasNull()){
            MarkerType alkalinityMarkerType = getMarkerType(state, uuidMarkerAlkalinity, "Alkalinity", "Alkalinity", null);
            Marker alkalinityMarker = Marker.NewInstance(alkalinityMarkerType, alkFlag);
            facade.getFieldUnit(true).addMarker(alkalinityMarker);
        }

        DescriptionBase<?> fieldDescription = getFieldObservationDescription(facade);

        //habitat, ecology, community, etc.
        String habitat = rs.getString("HabitatExplanation");
        if (isNotBlank(habitat)){
            Feature habitatExplanation = getFeature(state, uuidFeatureHabitatExplanation, "Habitat Explanation", "HabitatExplanation", null, null);
            TextData textData = TextData.NewInstance(habitatExplanation);
            textData.putText(Language.DEFAULT(), habitat);
            fieldDescription.addElement(textData);
        }

        String community = rs.getString("Comunity");  //sic
        if (isNotBlank(community)){
            Feature communityFeature = getFeature(state, uuidFeatureSpecimenCommunity, "Community", "The community of a specimen (e.g. other algae in the same sample)", null, null);
            TextData textData = TextData.NewInstance(communityFeature);
            textData.putText(Language.DEFAULT(), community);
            fieldDescription.addElement(textData);
        }

        String additionalData = rs.getString("AdditionalData");
        if (isNotBlank(additionalData)){  //or handle it as Annotation ??
            Feature additionalDataFeature = getFeature(state, uuidFeatureAdditionalData, "Additional Data", "Additional Data", null, null);
            TextData textData = TextData.NewInstance(additionalDataFeature);
            textData.putText(Language.DEFAULT(), additionalData);
            fieldDescription.addElement(textData);
        }

        String climateUuid = rs.getString("climateUuid");
        String habitatUuid = rs.getString("habitatUuid");
        String lifeFormUuid = rs.getString("lifeFormUuid");

        addCategoricalValue(state, fieldDescription, climateUuid, uuidFeatureAlgaTerraClimate);
        addCategoricalValue(state, fieldDescription, habitatUuid, Feature.HABITAT().getUuid());
        addCategoricalValue(state, fieldDescription, lifeFormUuid, uuidFeatureAlgaTerraLifeForm);

        //parameters
        makeParameter(state, rs, getFieldObservationDescription(facade));
    }

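    /**
     * Adds the EcoFact specific derived unit data: the voucher string is stored as the
     * accession number of the derived unit.
     */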
    private void handleEcoFactSpecificDerivedUnit(ResultSet rs, DerivedUnitFacade facade, AlgaTerraImportState state) throws SQLException {
        //voucher (stored as accession number)
        String voucher = rs.getString("Voucher");
        if (StringUtils.isNotBlank(voucher)){
            facade.setAccessionNumber(voucher);
        }
    }

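    /**
     * Adds a categorical data element for the given feature to the description, using
     * the state term identified by the given UUID string (if any).
     */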
    private void addCategoricalValue(AlgaTerraImportState importState, DescriptionBase<?> description, String uuidTerm, UUID featureUuid) {
        if (uuidTerm != null){
            State state = this.getStateTerm(importState, UUID.fromString(uuidTerm));
            Feature feature = getFeature(importState, featureUuid);
            CategoricalData categoricalData = CategoricalData.NewInstance(state, feature);
            description.addElement(categoricalData);
        }
    }

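    /**
     * Creates quantitative data for the up to 10 measured parameters of an EcoFact
     * (columns P1Value/P1Unit/P1Parameter/P1Method ... P10...) and adds them to the
     * given description.
     */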
    private void makeParameter(AlgaTerraImportState state, ResultSet rs, DescriptionBase<?> descriptionBase) throws SQLException {
        for (int i = 1; i <= 10; i++){
            String valueStr = rs.getString(String.format("P%dValue", i));
            String unitStr = rs.getString(String.format("P%dUnit", i));
            String parameter = rs.getString(String.format("P%dParameter", i));
            String method = rs.getString(String.format("P%dMethod", i));

            //method
            if (StringUtils.isNotBlank(method)){
                //TODO see http://dev.e-taxonomy.eu/trac/ticket/4205
                logger.warn("Methods not yet handled: " + method + ", #4205");
            }
            //parameter
            TermVocabulary<Feature> vocParameter = getVocabulary(TermType.Feature, uuidVocParameter, "Feature vocabulary for AlgaTerra measurement parameters", "Parameters", null, null, false, Feature.COMMON_NAME());
            if (StringUtils.isNotBlank(parameter)){
                UUID featureUuid = getParameterFeatureUuid(state, parameter);
                Feature feature = getFeature(state, featureUuid, parameter, parameter, null, vocParameter);
                QuantitativeData quantData = QuantitativeData.NewInstance(feature);

                //unit
                MeasurementUnit unit = getMeasurementUnit(state, unitStr);
                quantData.setUnit(unit);
                try {
                    //TODO the collected value modifiers are not yet attached to the measurement value
                    Set<DefinedTerm> valueModifier = new HashSet<DefinedTerm>();
                    valueStr = normalizeAndModifyValue(state, valueStr, valueModifier);
                    //value
                    Float valueFlt = Float.valueOf(valueStr);  //TODO maybe change model to Double ??

                    StatisticalMeasure measureSingleValue = getStatisticalMeasure(state, uuidStatMeasureSingleValue, "Value", "Single measurement value", null, null);
                    StatisticalMeasurementValue value = StatisticalMeasurementValue.NewInstance(measureSingleValue, valueFlt);
                    quantData.addStatisticalValue(value);
                    descriptionBase.addElement(quantData);

                } catch (NumberFormatException e) {
                    logger.warn(String.format("Value '%s' can't be parsed as a number. Parameter %s not imported.", valueStr, parameter));
                }
            }else if (isNotBlank(valueStr) || isNotBlank(unitStr)){
                logger.warn("There is a value or unit without a parameter: P" + i);
            }
        }
    }

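    /**
     * Normalizes a measurement value string (decimal comma to decimal point) and, if the
     * value starts with '<' or '>', strips the sign and records a corresponding
     * "lower than" / "greater than" modifier term in the given set.
     */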
    private String normalizeAndModifyValue(AlgaTerraImportState state, String valueStr, Set<DefinedTerm> valueModifier) {
        valueStr = valueStr.replace(",", ".");
        if (valueStr.startsWith("<")){
            TermVocabulary<DefinedTerm> measurementValueModifierVocabulary = getVocabulary(TermType.Modifier, uuidMeasurementValueModifier, "Measurement value modifier", "Measurement value modifier", null, null, false, DefinedTerm.NewModifierInstance(null, null, null));
            DefinedTerm modifier = getModifier(state, uuidModifierLowerThan, "Lower", "Lower than the given measurement value", "<", measurementValueModifierVocabulary);
            valueModifier.add(modifier);
            valueStr = valueStr.replace("<", "");
        }
        if (valueStr.startsWith(">")){
            TermVocabulary<DefinedTerm> measurementValueModifierVocabulary = getVocabulary(TermType.Modifier, uuidMeasurementValueModifier, "Measurement value modifier", "Measurement value modifier", null, null, false, DefinedTerm.NewModifierInstance(null, null, null));
            DefinedTerm modifier = getModifier(state, uuidModifierGreaterThan, "Greater", "Greater than the given measurement value", ">", measurementValueModifierVocabulary);
            valueModifier.add(modifier);
            valueStr = valueStr.replace(">", "");
        }
        return valueStr;
    }

    private UUID getParameterFeatureUuid(AlgaTerraImportState state, String key) {
        try {
            return AlgaTerraImportTransformer.getFeatureUuid(key);
        } catch (UndefinedTransformerMethodException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Returns the measurement unit term for the given unit string, or <code>null</code>
     * if the string is blank or not recognized.
     * TODO move to InputTransformerBase
     * @param state
     * @param unitStr
     * @return the measurement unit, or <code>null</code>
     */
    private MeasurementUnit getMeasurementUnit(AlgaTerraImportState state, String unitStr) {
        if (StringUtils.isNotBlank(unitStr)){
            UUID uuid = AlgaTerraImportTransformer.getMeasurementUnitUuid(unitStr);
            if (uuid != null){
                return getMeasurementUnit(state, uuid, unitStr, unitStr, unitStr, null);
            }else{
                logger.warn("MeasurementUnit was not recognized: " + unitStr);
                return null;
            }
        }else{
            return null;
        }
    }

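    /**
     * Preloads the CDM objects that records of this partition may refer to: field units
     * of duplicated EcoFacts, collections and sub-collections, and the climate, habitat
     * and life form terms.
     */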
    @Override
    public Map<Object, Map<String, ? extends CdmBase>> getRelatedObjectsForPartition(ResultSet rs, BerlinModelImportState state) {
        String nameSpace;
        Class<?> cdmClass;
        Set<String> idSet;
        Map<Object, Map<String, ? extends CdmBase>> result = new HashMap<Object, Map<String, ? extends CdmBase>>();

        try{
            Set<String> fieldObservationIdSet = new HashSet<String>();
            Set<String> termsIdSet = new HashSet<String>();
            Set<String> collectionIdSet = new HashSet<String>();

            while (rs.next()){
                handleForeignKey(rs, fieldObservationIdSet, "DuplicateFk");
                handleForeignKey(rs, termsIdSet, "ClimateFk");
                handleForeignKey(rs, termsIdSet, "HabitatFk");
                handleForeignKey(rs, termsIdSet, "LifeFormFk");
                handleForeignKey(rs, collectionIdSet, "CollectionFk");
            }

            //field observation map for duplicates
            nameSpace = AlgaTerraEcoFactImport.ECO_FACT_FIELD_OBSERVATION_NAMESPACE;
            cdmClass = FieldUnit.class;
            idSet = fieldObservationIdSet;
            Map<String, FieldUnit> fieldObservationMap = (Map<String, FieldUnit>)getCommonService().getSourcedObjectsByIdInSource(cdmClass, idSet, nameSpace);
            result.put(nameSpace, fieldObservationMap);

            //collections
            nameSpace = AlgaTerraCollectionImport.NAMESPACE_COLLECTION;
            cdmClass = Collection.class;
            idSet = collectionIdSet;
            Map<String, Collection> collectionMap = (Map<String, Collection>)getCommonService().getSourcedObjectsByIdInSource(cdmClass, idSet, nameSpace);
            result.put(nameSpace, collectionMap);

            //sub-collections
            nameSpace = AlgaTerraCollectionImport.NAMESPACE_SUBCOLLECTION;
            cdmClass = Collection.class;
            idSet = collectionIdSet;
            Map<String, Collection> subCollectionMap = (Map<String, Collection>)getCommonService().getSourcedObjectsByIdInSource(cdmClass, idSet, nameSpace);
            result.put(nameSpace, subCollectionMap);

            //terms
            nameSpace = AlgaTerraEcoFactImport.TERMS_NAMESPACE;
            cdmClass = DefinedTermBase.class;  //was FieldUnit.class (????????); the map is used for term lookup
            idSet = termsIdSet;
            Map<String, DefinedTermBase> termMap = (Map<String, DefinedTermBase>)getCommonService().getSourcedObjectsByIdInSource(cdmClass, idSet, nameSpace);
            result.put(nameSpace, termMap);

        } catch (SQLException e) {
            throw new RuntimeException(e);
        }
        return result;
    }

    @Override
    protected boolean doCheck(BerlinModelImportState state){
        IOValidator<BerlinModelImportState> validator = new AlgaTerraSpecimenImportValidator();
        return validator.validate(state);
    }

    @Override
    protected boolean isIgnore(BerlinModelImportState state){
        return ! ((AlgaTerraImportState)state).getAlgaTerraConfigurator().isDoEcoFacts();
    }

}