Revision 20553b7e
Added by Katja Luther over 4 years ago
cdm-pesi/src/main/java/eu/etaxonomy/cdm/app/pesi/merging/FaunaEuErmsFindIdenticalNamesActivator.java | ||
---|---|---|
85 | 85 |
private boolean writeSameNamesToCsvFile( |
86 | 86 |
List<PesiMergeObject> mergingObjects, String string) { |
87 | 87 |
|
88 |
<<<<<<< HEAD |
|
89 |
/** |
|
90 |
* @param args |
|
91 |
*/ |
|
92 |
public static void main(String[] args) { |
|
93 |
|
|
94 |
FaunaEuErmsFindIdenticalNamesActivator sc = new FaunaEuErmsFindIdenticalNamesActivator(); |
|
95 |
|
|
96 |
CdmApplicationController appCtrFaunaEu = sc.initDb(faunaEuropaeaSource); |
|
97 |
String sFileName = "C:\\Users\\k.luther\\test"; |
|
98 |
//CdmApplicationController appCtrErms = sc.initDb(ermsSource); |
|
99 |
List<String> propertyPaths = new ArrayList<>(); |
|
100 |
propertyPaths.add("sources.*"); |
|
101 |
propertyPaths.add("sources.idInSource"); |
|
102 |
propertyPaths.add("sources.idNamespace"); |
|
103 |
propertyPaths.add("taxonBases.*"); |
|
104 |
propertyPaths.add("taxonBases.relationsFromThisTaxon"); |
|
105 |
propertyPaths.add("taxonBases.taxonNodes.*"); |
|
106 |
propertyPaths.add("taxonBases.taxonNodes.parent.*"); |
|
107 |
propertyPaths.add("taxonBases.taxonNodes.childNodes.*"); |
|
108 |
propertyPaths.add("taxonBases.taxonNodes.childNodes.classification.rootNode.childNodes.*"); |
|
109 |
propertyPaths.add("taxonBases.taxonNodes.parent.taxon.name.*"); |
|
110 |
propertyPaths.add("taxonBases.acceptedTaxon.taxonNodes.*"); |
|
111 |
propertyPaths.add("taxonBases.acceptedTaxon.taxonNodes.childNodes.*"); |
|
112 |
propertyPaths.add("taxonBases.acceptedTaxon.taxonNodes.childNodes.classification.rootNode.childNodes.*"); |
|
113 |
System.err.println("Start getIdenticalNames..."); |
|
114 |
|
|
115 |
faunaSec = appCtrFaunaEu.getReferenceService().load(UUID.fromString("6786d863-75d4-4796-b916-c1c3dff4cb70")); |
|
116 |
ermsSec = appCtrFaunaEu.getReferenceService().load(UUID.fromString("7744bc26-f914-42c4-b54a-dd2a030a8bb7")); |
|
117 |
Map<String, List<TaxonName>> namesOfIdenticalTaxa = appCtrFaunaEu.getTaxonService().findIdenticalTaxonNameIds(ermsSec, faunaSec, propertyPaths); |
|
118 |
|
|
119 |
List<FaunaEuErmsMerging> mergingObjects = new ArrayList<>(); |
|
120 |
FaunaEuErmsMerging mergeObject; |
|
121 |
TaxonName faunaEuTaxName; |
|
122 |
TaxonName ermsTaxName; |
|
123 |
System.err.println("Start creating merging objects"); |
|
124 |
mergingObjects= sc.createMergeObjects(namesOfIdenticalTaxa, appCtrFaunaEu); |
|
125 |
boolean resultOK = true; |
|
126 |
System.err.println("Start creating csv files"); |
|
127 |
resultOK = resultOK && sc.writeSameNamesdifferentAuthorToCsv(mergingObjects, sFileName + "_authors.csv"); |
|
128 |
resultOK = resultOK &&sc.writeSameNamesdifferentStatusToCsv(mergingObjects, sFileName + "_status.csv"); |
|
129 |
resultOK = resultOK &&sc.writeSameNamesToCsVFile(mergingObjects, sFileName + "_names.csv"); |
|
130 |
//do not create the phylum file, explanation inside the method writeSameNamesdifferentPhylumToCsv |
|
131 |
//resultOK = resultOK &&sc.writeSameNamesdifferentPhylumToCsv(mergingObjects, sFileName + "_phylum.csv"); |
|
132 |
resultOK = resultOK &&sc.writeSameNamesDifferentParentToCsv(mergingObjects, sFileName + "parent.csv"); |
|
133 |
|
|
134 |
System.err.println("End merging Fauna Europaea and Erms" + resultOK); |
|
135 |
System.exit(0); |
|
136 |
|
|
137 |
} |
|
138 |
|
|
139 |
private boolean writeSameNamesToCsVFile( |
|
140 |
List<FaunaEuErmsMerging> mergingObjects, String string) { |
|
141 |
======= |
|
142 |
>>>>>>> 34743ac779cc3f8570bd9eae6109207adda534ea |
|
143 | 88 |
try{ |
144 | 89 |
FileWriter writer = new FileWriter(string); |
145 | 90 |
|
... | ... | |
158 | 103 |
} |
159 | 104 |
} |
160 | 105 |
|
161 |
<<<<<<< HEAD |
|
162 |
private boolean writeSameNamesdifferentPhylumToCsv(List<FaunaEuErmsMerging> mergingObjects, String sfileName){ |
|
163 |
try |
|
164 |
{ |
|
165 |
//do we really need this? It is a taxon that needs to be merged like all the others; for ERMS only one taxon has a different phylum (Valencia, but these are not the same taxa -> fish and ribbon worms) |
|
166 |
======= |
|
167 | 106 |
private boolean writeSameNamesDifferentPhylumToCsv( |
168 | 107 |
List<PesiMergeObject> mergingObjects, String sfileName){ |
169 | 108 |
|
170 | 109 |
try{ |
171 |
>>>>>>> 34743ac779cc3f8570bd9eae6109207adda534ea |
|
172 | 110 |
FileWriter writer = new FileWriter(sfileName); |
173 | 111 |
|
174 | 112 |
//create Header |
... | ... | |
176 | 114 |
createHeader(writer, firstLine); |
177 | 115 |
|
178 | 116 |
//write data |
179 |
<<<<<<< HEAD |
|
180 |
for (FaunaEuErmsMerging merging : mergingObjects){ |
|
181 |
//TODO the phylum is always different doing it this way, maybe we need to merge the phylum taxa first and then |
|
182 |
======= |
|
183 | 117 |
for (PesiMergeObject merging : mergingObjects){ |
184 | 118 |
//TODO |
185 |
>>>>>>> 34743ac779cc3f8570bd9eae6109207adda534ea |
|
186 | 119 |
if ((merging.getPhylumInErms()== null )^ (merging.getPhylumInFaunaEu()== null)){ |
187 | 120 |
writeCsvLine(writer, merging) ; |
188 | 121 |
}else if(!((merging.getPhylumInErms()==null) && (merging.getPhylumInFaunaEu()==null))){ |
189 |
if(!merging.getPhylumInErms().getNameTitleCache().equals(merging.getPhylumInFaunaEu().getNameTitleCache())){
|
|
122 |
if(!merging.getPhylumInErms().equals(merging.getPhylumInFaunaEu())){
|
|
190 | 123 |
writeCsvLine(writer, merging) ; |
191 | 124 |
} |
192 | 125 |
} |
... | ... | |
260 | 193 |
writer.append(';'); |
261 | 194 |
writer.append("id in Fauna Europaea"); |
262 | 195 |
writer.append(';'); |
263 |
writer.append("name in FE");
|
|
196 |
writer.append("name"); |
|
264 | 197 |
writer.append(';'); |
265 |
writer.append("author in FE");
|
|
198 |
writer.append("author"); |
|
266 | 199 |
writer.append(';'); |
267 |
writer.append("rank in FE");
|
|
200 |
writer.append("rank"); |
|
268 | 201 |
writer.append(';'); |
269 |
writer.append("state in FE");
|
|
202 |
writer.append("state"); |
|
270 | 203 |
writer.append(';'); |
271 |
writer.append("phylum in FE");
|
|
204 |
writer.append("phylum"); |
|
272 | 205 |
writer.append(';'); |
273 |
writer.append("parent in FE");
|
|
206 |
writer.append("parent"); |
|
274 | 207 |
writer.append(';'); |
275 |
writer.append("parent rank in FE");
|
|
208 |
writer.append("parent rank"); |
|
276 | 209 |
writer.append(';'); |
277 | 210 |
|
278 | 211 |
writer.append("uuid in Erms"); |
279 | 212 |
writer.append(';'); |
280 | 213 |
writer.append("id in Erms"); |
281 | 214 |
writer.append(';'); |
282 |
writer.append("name in Erms");
|
|
215 |
writer.append("name"); |
|
283 | 216 |
writer.append(';'); |
284 |
writer.append("author in Erms");
|
|
217 |
writer.append("author"); |
|
285 | 218 |
writer.append(';'); |
286 |
writer.append("rank in Erms");
|
|
219 |
writer.append("rank"); |
|
287 | 220 |
writer.append(';'); |
288 |
writer.append("state in Erms");
|
|
221 |
writer.append("state"); |
|
289 | 222 |
writer.append(';'); |
290 |
writer.append("phylum in Erms");
|
|
223 |
writer.append("phylum"); |
|
291 | 224 |
writer.append(';'); |
292 |
writer.append("parent in Erms");
|
|
225 |
writer.append("parent"); |
|
293 | 226 |
writer.append(';'); |
294 |
writer.append("parent rank in Erms");
|
|
227 |
writer.append("parent rank"); |
|
295 | 228 |
writer.append('\n'); |
296 | 229 |
} |
297 | 230 |
|
... | ... | |
390 | 323 |
} |
391 | 324 |
|
392 | 325 |
writer.append(';'); |
393 |
writer.append(merging.getPhylumInErms() != null? merging.getPhylumInErms().getTitleCache():""); |
|
326 |
writer.append(merging.getPhylumInErms() != null? merging.getPhylumInErms().getTaxonTitleCache():"");
|
|
394 | 327 |
writer.append(';'); |
395 | 328 |
writer.append(merging.getParentStringInErms()); |
396 | 329 |
writer.append(';'); |
... | ... | |
406 | 339 |
Classification classification1 = appCtr.getClassificationService().load(uuidClassification1); |
407 | 340 |
Classification classification2 = appCtr.getClassificationService().load(uuidClassification2); |
408 | 341 |
|
409 |
<<<<<<< HEAD |
|
410 |
} |
|
411 |
}else{ |
|
412 |
mergeObject.setStatInFaunaEu(false); |
|
413 |
TaxonNode parentNode = getAcceptedNode(faunaEuName); |
|
414 |
//TODO: ändern mit erweitertem Initializer.. |
|
415 |
if (parentNode != null){ |
|
416 |
TaxonName parentName = HibernateProxyHelper.deproxy(parentNode.getTaxon().getName()); |
|
417 |
String parentNameCache = parentName.getNameCache(); |
|
418 |
mergeObject.setParentStringInFaunaEu(parentNameCache); |
|
419 |
mergeObject.setParentRankStringInFaunaEu(parentName.getRank().getLabel()); |
|
420 |
|
|
421 |
} |
|
422 |
} |
|
423 |
|
|
424 |
|
|
425 |
mergeObject.setRankInErms(ermsName.getRank().getLabel()); |
|
426 |
mergeObject.setRankInFaunaEu(faunaEuName.getRank().getLabel()); |
|
427 |
|
|
428 |
|
|
429 |
merge.add(mergeObject); |
|
430 |
} |
|
431 |
|
|
432 |
======= |
|
433 | 342 |
List<PesiMergeObject> merge = new ArrayList<>(); |
434 | 343 |
|
435 | 344 |
for (String nameCache: names.keySet()){ |
436 | 345 |
createSingleMergeObject(appCtr, merge, nameCache, names.get(nameCache),classification1, classification2); |
437 | 346 |
} |
438 |
>>>>>>> 34743ac779cc3f8570bd9eae6109207adda534ea |
|
439 | 347 |
|
440 | 348 |
return merge; |
441 | 349 |
} |
Also available in: Unified diff
Revert wrong merge changes on FaunaEuErmsFindIdenticalNamesActivator