-
Notifications
You must be signed in to change notification settings - Fork 3
/
mpiobservablehandlerpt.cpp
300 lines (265 loc) · 13.2 KB
/
mpiobservablehandlerpt.cpp
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. See the enclosed file LICENSE for a copy or if
* that was not distributed with this file, You can obtain one at
* http://mozilla.org/MPL/2.0/.
*
* Copyright 2017 Max H. Gerlach
*
* */
#pragma GCC diagnostic ignored "-Wconversion"
#pragma GCC diagnostic ignored "-Wshadow"
#include "boost/filesystem.hpp"
#pragma GCC diagnostic ignored "-Wunused-parameter"
#include "boost/mpi.hpp"
#pragma GCC diagnostic pop
#include "mpiobservablehandlerpt.h"
namespace fs = boost::filesystem;
namespace mpi = boost::mpi;
ScalarObservableHandlerPT::ScalarObservableHandlerPT(
    const ScalarObservable& localObservable,
    const std::vector<int>& current_process_par,
    const DetQMCParams& simulationParameters,
    const DetQMCPTParams& ptParams,
    const MetadataMap& metadataToStoreModel,
    const MetadataMap& metadataToStoreMC,
    const MetadataMap& metadataToStorePT)
    : ObservableHandlerPTCommon<double>(localObservable, current_process_par,
                                        simulationParameters, ptParams,
                                        metadataToStoreModel, metadataToStoreMC,
                                        metadataToStorePT),
      par_timeseriesBuffer(),
      par_storage(),
      par_storageFileStarted()
{
    // Only the root process keeps per-control-parameter state.
    if (processIndex == 0) {
        // One (initially empty) time series per control parameter value --
        // there are exactly as many parameter values as processes.
        par_timeseriesBuffer.resize(numProcesses, std::vector<double>());
        // One writer slot per control parameter value, initialized to a
        // null-pointer-like empty state.
        par_storage.resize(numProcesses);
        // Per-parameter "file already begun" flags; boolean false is
        // stored as char.
        par_storageFileStarted.resize(numProcesses, false);
    }
}
// Invoked from insertValue() after the per-replica values have been
// gathered into process_cur_value at the root process.
void ScalarObservableHandlerPT::handleValues(uint32_t curSweep) {
    // Root: append each replica's current value to the time series that
    // belongs to the control parameter value that replica currently holds.
    if (processIndex == 0 and mcparams.timeseries) {
        for (int procIdx = 0; procIdx < numProcesses; ++procIdx) {
            const int cpIndex = process_par[procIdx];
            par_timeseriesBuffer[cpIndex].push_back(process_cur_value[procIdx]);
        }
    }
    // Delegate the remaining bookkeeping to the shared base-class handler.
    ObservableHandlerPTCommon<double>::handleValues(curSweep);
}
// Gather the scalar measured by each replica into process_cur_value at
// the root process (rank 0), then run the common value handling.
// (This boost::mpi::gather replaces an earlier hand-written MPI_Gather
// of a single MPI_DOUBLE per rank, which needed a const_cast for
// MPI < 3.0.)
void ScalarObservableHandlerPT::insertValue(uint32_t curSweep) {
    mpi::communicator world;
    mpi::gather(world,
                localObs.valRef.get(),  // send: the value localObs references
                process_cur_value,      // recv buffer (filled at root)
                0);                     // root rank
    this->handleValues(curSweep);
}
std::tuple<double, double> ScalarObservableHandlerPT::evaluateJackknife(
int control_parameter_index) const {
if (processIndex != 0) {
return std::make_tuple(0.0, 0.0);
} else {
double mean;
double error;
std::tie(mean, error) = ObservableHandlerPTCommon<double>::evaluateJackknife(control_parameter_index);
if (jkBlockCount <= 1 and par_timeseriesBuffer[control_parameter_index].size() == countValues) {
error = std::sqrt(variance(par_timeseriesBuffer[control_parameter_index], mean));
}
return std::make_tuple(mean, error);
}
}
// Flush the buffered time series to one file per control parameter value,
// "p<cpi>_<paramName><paramValue>/<obsname>.series", creating each file
// (with a full metadata header) on first use and appending afterwards.
// Root-only; does nothing unless time series recording is enabled.
void ScalarObservableHandlerPT::outputTimeseries() {
    //TODO: float precision
    if (processIndex == 0 and mcparams.timeseries) {
        for (int p_i = 0; p_i < numProcesses; ++p_i) {
            // control parameter index currently held by process p_i
            int cpi = process_par[p_i];
            std::string subdirectory = "p" + numToString(cpi) + "_" +
                ptparams.controlParameterName +
                numToString(ptparams.controlParameterValues[cpi]);
            fs::create_directories(fs::path(subdirectory));
            // Lazily (re)create the writer for this control parameter; the
            // separate par_storageFileStarted flag distinguishes "never
            // written" (create + header) from "written before" (append).
            if (not par_storage[cpi]) {
                std::string filename = (fs::path(subdirectory) / fs::path(name + ".series")).string();
                if (not par_storageFileStarted[cpi]) {
                    // First flush for this parameter: create a fresh file
                    // and write the metadata header once.
                    par_storage[cpi] =
                        std::unique_ptr<DoubleVectorWriterSuccessive>(
                            new DoubleVectorWriterSuccessive(filename,
                                                             false // create a new file
                                ));
                    par_storage[cpi]->addHeaderText("Timeseries for observable " + name);
                    par_storage[cpi]->addMetadataMap(par_metaModel[cpi]);
                    par_storage[cpi]->addMetadataMap(metaMC);
                    par_storage[cpi]->addMetadataMap(metaPT);
                    par_storage[cpi]->addMeta("observable", name);
                    par_storage[cpi]->writeHeader();
                    par_storageFileStarted[cpi] = true;
                } else {
                    // The file exists from an earlier flush: reopen in
                    // append mode, no new header.
                    par_storage[cpi] = std::unique_ptr<DoubleVectorWriterSuccessive>(
                        new DoubleVectorWriterSuccessive(filename,
                                                         true// append to file
                            ));
                }
            }
            //append last batch of measurements
            par_storage[cpi]->writeData(par_timeseriesBuffer[cpi]);
            //no need to keep it in memory anymore
            par_timeseriesBuffer[cpi].resize(0);
        }
    }
}
VectorObservableHandlerPT::VectorObservableHandlerPT(const VectorObservable& localObservable,
                                                     const std::vector<int>& current_process_par,
                                                     const DetQMCParams& simulationParameters,
                                                     const DetQMCPTParams& ptParams,
                                                     const MetadataMap& metadataToStoreModel,
                                                     const MetadataMap& metadataToStoreMC,
                                                     const MetadataMap& metadataToStorePT)
    : ObservableHandlerPTCommon<arma::Col<double>>(
          localObservable, current_process_par,
          simulationParameters, ptParams,
          metadataToStoreModel, metadataToStoreMC,
          metadataToStorePT,
          arma::zeros<arma::Col<double>>(localObservable.vectorSize)),
      vsize(localObservable.vectorSize), indexes(vsize), indexName("site"),
      mpi_gather_buffer()
{
    // Default index labels are just 0, 1, ..., vsize - 1.
    for (uint32_t i = 0; i < vsize; ++i) {
        indexes[i] = i;
    }
    // The root process needs room for one vector of vsize entries per
    // replica; all other ranks only pass a dummy one-element buffer to
    // the gather call.
    mpi_gather_buffer.resize((processIndex == 0) ? numProcesses * vsize : 1,
                             0.0);
}
// Gather each replica's vector-valued measurement into one contiguous
// buffer at the root process, rebind the per-replica Armadillo vectors
// to that buffer, then run the shared value handling.
void VectorObservableHandlerPT::insertValue(uint32_t curSweep) {
    mpi::communicator world;
    //MPI: gather the value of localObs from each replica in the
    //buffer at the root process: process_cur_value
    assert(vsize == localObs.valRef.get().n_elem);
    // Raw-MPI equivalent of the boost::mpi::gather call below, kept for
    // reference:
    // MPI_Gather( const_cast<double*>(localObs.valRef.get().memptr()), // sendbuf
    //             // pass arma vector data behind localObs reference,
    //             // need to cast away const for MPI < 3.0
    //             vsize, // sendcount
    //             MPI_DOUBLE, // sendtype
    //             mpi_gather_buffer.data(), // recvbuf: pointer to internal memory of vector [at root process]
    //             vsize, // recvcount
    //             MPI_DOUBLE, // recvtype
    //             0, // root
    //             MPI_COMM_WORLD // comm
    //     );
    mpi::gather(world,
                localObs.valRef.get().memptr(), // send: pass arma vector data behind localObs reference
                vsize, // sendcount
                mpi_gather_buffer, // recv
                0 // root
        );
    // use the contiguous memory of mpi_gather_buffer to hold the data
    // for the Armadillo vectors used for the individual replica measurements
    // at the root process
    if (processIndex == 0) {
        assert(mpi_gather_buffer.size() == numProcesses * vsize);
        for (int p_i = 0; p_i < numProcesses; ++p_i) {
            assert(process_cur_value[p_i].n_elem == vsize);
            // Non-owning, fixed-size Armadillo view that aliases slice p_i
            // of mpi_gather_buffer instead of copying it.
            // NOTE(review): this aliasing is only valid while
            // mpi_gather_buffer's storage is not reallocated; it is sized
            // once in the constructor and apparently never resized after --
            // confirm no later resize exists elsewhere.
            process_cur_value[p_i] = arma::Col<double>(
                mpi_gather_buffer.data() + p_i * vsize, // aux_mem* [typed pointer: we do not need a sizeof(double) factor]
                vsize, // number_of_elements
                false, // copy_aux_mem [will continue to use the mpi_gather_buffer memory]
                true // strict [vector will remain bound to this memory for its lifetime]
                );
        }
    }
    this->handleValues(curSweep);
}
void outputResults(const std::vector<std::unique_ptr<ScalarObservableHandlerPT>>& obsHandlers) {
boost::mpi::communicator world;
int processIndex = world.rank();
int numProcesses = world.size();
if (processIndex == 0 and obsHandlers.size() > 0) {
typedef std::map<std::string, num> StringNumMap;
typedef std::shared_ptr<StringNumMap> StringNumMapPtr;
for (int cpi = 0; cpi < numProcesses; ++cpi) {
StringNumMapPtr values(new StringNumMap);
StringNumMapPtr errors(new StringNumMap);
std::string subdirectory = "p" + numToString(cpi) + "_" +
(*obsHandlers.begin())->ptparams.controlParameterName +
numToString((*obsHandlers.begin())->ptparams.controlParameterValues[cpi]);
fs::create_directories(fs::path(subdirectory));
for (auto p = obsHandlers.cbegin(); p != obsHandlers.cend(); ++p) {
num val, err;
std::tie(val, err) = (*p)->evaluateJackknife(cpi);
std::string obsname = (*p)->name;
(*values)[obsname] = val;
(*errors)[obsname] = err;
}
DataMapWriter<std::string, num> output;
output.setData(values);
output.setErrors(errors);
output.addHeaderText("Monte Carlo results for observable expectation values");
output.addMetadataMap((*obsHandlers.begin())->par_metaModel[cpi]);
output.addMetadataMap((*obsHandlers.begin())->metaMC);
output.addMetadataMap((*obsHandlers.begin())->metaPT);
output.addMeta("key", "observable");
output.addHeaderText("observable\t value \t error");
output.writeToFile((fs::path(subdirectory) / fs::path("results.values")).string());
}
}
}
void outputResults(const std::vector<std::unique_ptr<VectorObservableHandlerPT>>& obsHandlers) {
boost::mpi::communicator world;
int processIndex = world.rank();
int numProcesses = world.size();
if (processIndex == 0 and obsHandlers.size() > 0) {
typedef std::map<num, num> NumMap;
typedef std::shared_ptr<NumMap> NumMapPtr;
typedef DataMapWriter<num,num> NumMapWriter;
for (int cpi = 0; cpi < numProcesses; ++cpi) {
std::string subdirectory = "p" + numToString(cpi) + "_" +
(*obsHandlers.begin())->ptparams.controlParameterName +
numToString((*obsHandlers.begin())->ptparams.controlParameterValues[cpi]);
for (auto p = obsHandlers.cbegin(); p != obsHandlers.cend(); ++p) {
const std::unique_ptr<VectorObservableHandlerPT>& obsptr = *p;
uint32_t numberIndexes = obsptr->getVectorSize();
arma::Col<num> values, errors;
std::tie(values, errors) = obsptr->evaluateJackknife(cpi);
NumMapPtr valmap(new NumMap);
NumMapPtr errmap(new NumMap);
for (uint32_t counter = 0; counter < numberIndexes; ++counter) {
num index = obsptr->indexes[counter];
valmap->insert(std::make_pair(index, values[counter]));
errmap->insert(std::make_pair(index, errors[counter]));
}
NumMapWriter output;
output.setData(valmap);
output.setErrors(errmap);
output.addHeaderText("Monte Carlo results for vector observable " + obsptr->name +
" expectation values");
output.addMetadataMap(obsptr->par_metaModel[cpi]);
output.addMetadataMap(obsptr->metaMC);
output.addMetadataMap(obsptr->metaPT);
output.addMeta("key", obsptr->indexName);
output.addMeta("observable", obsptr->name);
output.addHeaderText("key\t value \t error");
output.writeToFile(subdirectory + "/results-" + obsptr->name + ".values");
}
}
}
}