/**
 * Copyright (C) 2007 EDIT
 * European Distributed Institute of Taxonomy
 * http://www.e-taxonomy.eu
 *
 * The contents of this file are subject to the Mozilla Public License Version 1.1
 * See LICENSE.TXT at the top of this package for the full license terms.
 */
|
package eu.etaxonomy.cdm.database;

import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.UUID;

import javax.annotation.PostConstruct;

import org.apache.log4j.Logger;
import org.hibernate.Hibernate;
import org.joda.time.DateTime;
import org.joda.time.Period;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.DefaultTransactionDefinition;

import eu.etaxonomy.cdm.model.common.DefaultTermInitializer;
import eu.etaxonomy.cdm.model.common.DefinedTermBase;
import eu.etaxonomy.cdm.model.common.Representation;
import eu.etaxonomy.cdm.model.common.TermVocabulary;
import eu.etaxonomy.cdm.model.common.VocabularyEnum;
import eu.etaxonomy.cdm.model.common.init.TermLoader;
import eu.etaxonomy.cdm.persistence.dao.common.ITermVocabularyDao;
|
/**
|
39
|
* Spring bean class to initialize the {@link IVocabularyStore IVocabularyStore}.
|
40
|
* To initialize the store the {@link TermLoader TermLoader} and the {@link IVocabularyStore IVocabularyStore}
|
41
|
* are injected via spring and the initializeTerms method is called as an init-method (@PostConstruct).
|
42
|
|
43
|
* @author a.mueller
|
44
|
*/
|
45
|
|
46
|
@Component
|
47
|
public class PersistentTermInitializer extends DefaultTermInitializer {
|
48
|
private static final Logger logger = Logger.getLogger(PersistentTermInitializer.class);
|
49
|
|
50
|
private boolean omit = false;
|
51
|
protected ITermVocabularyDao vocabularyDao;
|
52
|
|
53
|
protected PlatformTransactionManager transactionManager;
|
54
|
protected DefaultTransactionDefinition txDefinition = new DefaultTransactionDefinition();
|
55
|
|
56
|
public PersistentTermInitializer() {
|
57
|
txDefinition.setName("PersistentTermInitializer.initialize()");
|
58
|
txDefinition.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRED);
|
59
|
}
|
60
|
|
61
|
@Override
|
62
|
public void setOmit(boolean omit) {
|
63
|
this.omit = omit;
|
64
|
}
|
65
|
|
66
|
@Override
|
67
|
public boolean isOmit() {
|
68
|
return omit;
|
69
|
}
|
70
|
|
71
|
|
72
|
@Autowired
|
73
|
public void setVocabularyDao(ITermVocabularyDao vocabularyDao) {
|
74
|
this.vocabularyDao = vocabularyDao;
|
75
|
}
|
76
|
|
77
|
@Autowired
|
78
|
public void setTransactionManager(PlatformTransactionManager transactionManager) {
|
79
|
this.transactionManager = transactionManager;
|
80
|
}
|
81
|
|
82
|
/*
|
83
|
* After a bit of head-scratching I found section 3.5.1.3. in the current spring
|
84
|
* reference manual - @PostConstruct / afterPropertiesSet() is called
|
85
|
* immediatly after the bean is constructed, prior to any AOP interceptors being
|
86
|
* wrapped round the bean. Thus, we have to use programmatic transactions, not
|
87
|
* annotations or pointcuts.
|
88
|
*/
|
89
|
@PostConstruct
|
90
|
@Override
|
91
|
public void initialize() {
|
92
|
super.initialize();
|
93
|
}
|
94
|
|
95
|
|
96
|
@Override
|
97
|
public void doInitialize(){
|
98
|
logger.info("PersistentTermInitializer initialize start ...");
|
99
|
|
100
|
if (omit){
|
101
|
logger.info("PersistentTermInitializer.omit == true, returning without initializing terms");
|
102
|
return;
|
103
|
} else {
|
104
|
Map<UUID,DefinedTermBase> terms = new HashMap<>();
|
105
|
logger.info("PersistentTermInitializer.omit == false, initializing " + VocabularyEnum.values().length + " term classes");
|
106
|
|
107
|
DateTime start = new DateTime();
|
108
|
|
109
|
TransactionStatus txStatus = transactionManager.getTransaction(txDefinition);
|
110
|
|
111
|
//load uuids from csv files
|
112
|
logger.info("Start new ... " );
|
113
|
Map<UUID, Set<UUID>> uuidMap = new HashMap<>();
|
114
|
Map<UUID, VocabularyEnum> vocTypeMap = new HashMap<>();
|
115
|
|
116
|
for(VocabularyEnum vocabularyType : VocabularyEnum.values()) {
|
117
|
UUID vocUUID = termLoader.loadUuids(vocabularyType, uuidMap);
|
118
|
if (! vocUUID.equals(vocabularyType.getUuid())){
|
119
|
throw new IllegalStateException("Vocabulary uuid in csv file and vocabulary type differ for vocabulary type " + vocabularyType.toString());
|
120
|
}
|
121
|
vocTypeMap.put(vocUUID, vocabularyType);
|
122
|
}
|
123
|
|
124
|
//find and create missing terms and load vocabularies from repository
|
125
|
logger.info("Create missing terms ... " );
|
126
|
Map<UUID, TermVocabulary<?>> vocabularyMap = new HashMap<>();
|
127
|
Map<UUID, Set<UUID>> missingTermUuids = new HashMap<>();
|
128
|
|
129
|
vocabularyDao.missingTermUuids(uuidMap, missingTermUuids, vocabularyMap);
|
130
|
|
131
|
for( VocabularyEnum vocabularyType : VocabularyEnum.values()) { //required to keep the order (language must be the first vocabulary to load)
|
132
|
UUID vocUuid = vocabularyType.getUuid();
|
133
|
if (missingTermUuids.keySet().contains(vocabularyType.getUuid()) || vocabularyMap.get(vocUuid) == null ){
|
134
|
|
135
|
VocabularyEnum vocType = vocTypeMap.get(vocUuid); //TODO not really necessary, we could also do VocType.getUuuid();
|
136
|
TermVocabulary<?> voc = vocabularyMap.get(vocUuid);
|
137
|
if (voc == null){
|
138
|
//vocabulary is missing
|
139
|
voc = termLoader.loadTerms(vocType, terms);
|
140
|
vocabularyDao.save(voc);
|
141
|
vocabularyMap.put(voc.getUuid(), voc);
|
142
|
}else{
|
143
|
//single terms are missing
|
144
|
Set<UUID> missingTermsOfVoc = missingTermUuids.get(vocUuid);
|
145
|
Set<? extends DefinedTermBase> createdTerms = termLoader.loadSingleTerms(vocType, voc, missingTermsOfVoc);
|
146
|
vocabularyDao.saveOrUpdate(voc);
|
147
|
}
|
148
|
}
|
149
|
initializeAndStore(vocabularyType, terms, vocabularyMap); //TODO
|
150
|
}
|
151
|
|
152
|
transactionManager.commit(txStatus);
|
153
|
|
154
|
DateTime end = new DateTime();
|
155
|
Period period = new Period(start, end);
|
156
|
logger.info ("Term loading took " + period.getSeconds() + "." + period.getMillis() + " seconds ");
|
157
|
|
158
|
}
|
159
|
logger.info("PersistentTermInitializer initialize end ...");
|
160
|
}
|
161
|
|
162
|
|
163
|
/**
|
164
|
* Initializes the static fields of the <code>TermVocabulary</code> classes.
|
165
|
*
|
166
|
* @param clazz the <code>Class</code> of the vocabulary
|
167
|
* @param vocabularyUuid the <code>UUID</code> of the vocabulary
|
168
|
* @param terms a <code>Map</code> containing all already
|
169
|
* loaded terms with their <code>UUID</code> as key
|
170
|
* @param vocabularyMap
|
171
|
*/
|
172
|
protected void initializeAndStore(VocabularyEnum vocType, Map<UUID,DefinedTermBase> terms, Map<UUID, TermVocabulary<?>> vocabularyMap) {
|
173
|
Class<? extends DefinedTermBase<?>> clazz = vocType.getClazz();
|
174
|
UUID vocabularyUuid = vocType.getUuid();
|
175
|
|
176
|
if (logger.isDebugEnabled()){ logger.debug("Loading vocabulary for class " + clazz.getSimpleName() + " with uuid " + vocabularyUuid );}
|
177
|
|
178
|
TermVocabulary<? extends DefinedTermBase> persistedVocabulary;
|
179
|
if (vocabularyMap == null || vocabularyMap.get(vocabularyUuid) == null ){
|
180
|
persistedVocabulary = vocabularyDao.findByUuid(vocabularyUuid);
|
181
|
}else{
|
182
|
persistedVocabulary = vocabularyMap.get(vocabularyUuid);
|
183
|
}
|
184
|
|
185
|
if (logger.isDebugEnabled()){ logger.debug("Initializing terms in vocabulary for class " + clazz.getSimpleName() + " with uuid " + vocabularyUuid );}
|
186
|
//not really needed anymore as we do term initializing from the beginning now
|
187
|
if (persistedVocabulary != null){
|
188
|
for(DefinedTermBase<?> definedTermBase : persistedVocabulary.getTerms()) {
|
189
|
|
190
|
Hibernate.initialize(definedTermBase.getRepresentations());
|
191
|
for(Representation r : definedTermBase.getRepresentations()) {
|
192
|
Hibernate.initialize(r.getLanguage());
|
193
|
}
|
194
|
terms.put(definedTermBase.getUuid(), definedTermBase);
|
195
|
}
|
196
|
}else{
|
197
|
logger.error("Persisted Vocabulary does not exist in database: " + vocabularyUuid);
|
198
|
throw new IllegalStateException("Persisted Vocabulary does not exist in database: " + vocabularyUuid);
|
199
|
}
|
200
|
|
201
|
|
202
|
//fill term store
|
203
|
if (logger.isDebugEnabled()){ logger.debug("Setting defined Terms for class " + clazz.getSimpleName() + ", " + persistedVocabulary.getTerms().size() + " in vocabulary");}
|
204
|
super.setDefinedTerms(clazz, persistedVocabulary);
|
205
|
if (logger.isDebugEnabled()){ logger.debug("Second pass - DONE");}
|
206
|
|
207
|
}
|
208
|
|
209
|
/**
|
210
|
* This method loads the vocabularies from CSV files and compares them to the vocabularies
|
211
|
* already in database. Non-existing vocabularies will be created and vocabularies with missing
|
212
|
* terms will be updated.
|
213
|
*
|
214
|
* @param clazz the <code>Class</code> of the vocabulary
|
215
|
* @param persistedTerms a <code>Map</code> containing all already
|
216
|
* loaded terms with their <code>UUID</code> as key
|
217
|
* @return the <code>UUID</code> of the loaded vocabulary as found in CSV file
|
218
|
*/
|
219
|
private UUID firstPass(VocabularyEnum vocabularyType, Map<UUID, DefinedTermBase> persistedTerms) {
|
220
|
logger.info("Loading terms for '" + vocabularyType.name() + "': " + vocabularyType.getClazz().getName());
|
221
|
Map<UUID,DefinedTermBase> terms = new HashMap<UUID,DefinedTermBase>();
|
222
|
|
223
|
for(DefinedTermBase persistedTerm : persistedTerms.values()) {
|
224
|
terms.put(persistedTerm.getUuid(), persistedTerm);
|
225
|
}
|
226
|
|
227
|
TermVocabulary<?> loadedVocabulary = termLoader.loadTerms(vocabularyType, terms);
|
228
|
|
229
|
UUID vocabularyUuid = loadedVocabulary.getUuid();
|
230
|
|
231
|
|
232
|
if (logger.isDebugEnabled()){logger.debug("loading persisted vocabulary " + vocabularyUuid);}
|
233
|
TermVocabulary<DefinedTermBase> persistedVocabulary = vocabularyDao.findByUuid(vocabularyUuid);
|
234
|
if(persistedVocabulary == null) { // i.e. there is no persisted vocabulary
|
235
|
//handle new vocabulary
|
236
|
if (logger.isDebugEnabled()){logger.debug("vocabulary " + vocabularyUuid + " does not exist - saving");}
|
237
|
saveVocabulary(loadedVocabulary);
|
238
|
}else {
|
239
|
//handle existing vocabulary
|
240
|
if (logger.isDebugEnabled()){logger.debug("vocabulary " + vocabularyUuid + " does exist and already has " + persistedVocabulary.size() + " terms");}
|
241
|
boolean persistedVocabularyHasMissingTerms = false;
|
242
|
for(Object t : loadedVocabulary.getTerms()) {
|
243
|
if(!persistedVocabulary.getTerms().contains(t)) {
|
244
|
persistedVocabularyHasMissingTerms = true;
|
245
|
persistedVocabulary.addTerm((DefinedTermBase)t);
|
246
|
}
|
247
|
}
|
248
|
if(persistedVocabularyHasMissingTerms) {
|
249
|
if (logger.isDebugEnabled()){logger.debug("vocabulary " + vocabularyUuid + " exists but does not have all the required terms - updating");}
|
250
|
updateVocabulary(persistedVocabulary);
|
251
|
}
|
252
|
}
|
253
|
return vocabularyUuid;
|
254
|
}
|
255
|
|
256
|
private void updateVocabulary(TermVocabulary vocabulary) {
|
257
|
TransactionStatus txStatus = transactionManager.getTransaction(txDefinition);
|
258
|
vocabularyDao.update(vocabulary);
|
259
|
transactionManager.commit(txStatus);
|
260
|
}
|
261
|
|
262
|
private void saveVocabulary(TermVocabulary vocabulary) {
|
263
|
TransactionStatus txStatus = transactionManager.getTransaction(txDefinition);
|
264
|
vocabularyDao.save(vocabulary);
|
265
|
transactionManager.commit(txStatus);
|
266
|
}
|
267
|
}
|