src/colvar.cpp (12 changes: 6 additions & 6 deletions)
@@ -1280,7 +1280,7 @@ int colvar::init_dependencies() {
   // Initialize feature_states for each instance
   feature_states.reserve(f_cv_ntot);
   for (i = feature_states.size(); i < f_cv_ntot; i++) {
-    feature_states.push_back(feature_state(true, false));
+    feature_states.emplace_back(true, false);
     // Most features are available, so we set them so
     // and list exceptions below
   }
@@ -2043,8 +2043,8 @@ void colvar::communicate_forces()
   func_grads.reserve(cvcs.size());
   for (i = 0; i < cvcs.size(); i++) {
     if (!cvcs[i]->is_enabled()) continue;
-    func_grads.push_back(cvm::matrix2d<cvm::real> (x.size(),
-                                                   cvcs[i]->value().size()));
+    func_grads.emplace_back(x.size(),
+                            cvcs[i]->value().size());
   }
   int res = cvm::proxy->run_colvar_gradient_callback(scripted_function, sorted_cvc_values, func_grads);

@@ -2803,7 +2803,7 @@ int colvar::calc_acf()
   case acf_vel:
     // allocate space for the velocities history
     for (i = 0; i < acf_stride; i++) {
-      acf_v_history.push_back(std::list<colvarvalue>());
+      acf_v_history.emplace_back();
     }
     acf_v_history_p = acf_v_history.begin();
     break;
@@ -2812,7 +2812,7 @@
   case acf_p2coor:
     // allocate space for the coordinates history
     for (i = 0; i < acf_stride; i++) {
-      acf_x_history.push_back(std::list<colvarvalue>());
+      acf_x_history.emplace_back();
     }
     acf_x_history_p = acf_x_history.begin();
     break;
@@ -3003,7 +3003,7 @@ int colvar::calc_runave()

     acf_nframes = 0;

-    x_history.push_back(std::list<colvarvalue>());
+    x_history.emplace_back();
     x_history_p = x_history.begin();

   } else {
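Note on the recurring `push_back` → `emplace_back` pattern in this PR: `push_back(T(args...))` constructs a temporary and then moves (or copies) it into the container, while `emplace_back(args...)` forwards the arguments to `T`'s constructor and builds the element in place, skipping the temporary. A minimal standalone sketch (not Colvars code; the `Probe` type is made up for illustration):

```cpp
#include <iostream>
#include <vector>

struct Probe {
  explicit Probe(int v) : value(v) { std::cout << "ctor\n"; }
  Probe(const Probe &other) : value(other.value) { std::cout << "copy\n"; }
  Probe(Probe &&other) noexcept : value(other.value) { std::cout << "move\n"; }
  int value;
};

int main() {
  std::vector<Probe> v;
  v.reserve(2);          // reserve up front so reallocation does not add noise

  v.push_back(Probe(1)); // prints "ctor" then "move": a temporary is built first
  v.emplace_back(2);     // prints "ctor" only: constructed directly in the vector
}
```

For cheap-to-move types the difference is mostly stylistic, but it removes one temporary per insertion and reads more directly.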
src/colvar_neuralnetworkcompute.cpp (8 changes: 4 additions & 4 deletions)
@@ -246,15 +246,15 @@ bool neuralNetworkCompute::addDenseLayer(const denseLayer& layer) {
     if (m_dense_layers.empty()) {
         // add layer to this ann directly if m_dense_layers is empty
         m_dense_layers.push_back(layer);
-        m_layers_output.push_back(std::vector<double>(layer.getOutputSize()));
-        m_grads_tmp.push_back(std::vector<std::vector<double>>(layer.getOutputSize(), std::vector<double>(layer.getInputSize(), 0)));
+        m_layers_output.emplace_back(layer.getOutputSize());
+        m_grads_tmp.emplace_back(layer.getOutputSize(), std::vector<double>(layer.getInputSize(), 0));
         return true;
     } else {
         // otherwise, we need to check if the output of last layer in m_dense_layers matches the input of layer to be added
         if (m_dense_layers.back().getOutputSize() == layer.getInputSize()) {
             m_dense_layers.push_back(layer);
-            m_layers_output.push_back(std::vector<double>(layer.getOutputSize()));
-            m_grads_tmp.push_back(std::vector<std::vector<double>>(layer.getOutputSize(), std::vector<double>(layer.getInputSize(), 0)));
+            m_layers_output.emplace_back(layer.getOutputSize());
+            m_grads_tmp.emplace_back(layer.getOutputSize(), std::vector<double>(layer.getInputSize(), 0));
             return true;
         } else {
             return false;
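Here the forwarded arguments select `std::vector` constructors: `m_layers_output.emplace_back(layer.getOutputSize())` invokes `std::vector<double>(count)` (value-initialized elements), and the `m_grads_tmp` call invokes the `(count, value)` fill constructor, building one zero matrix per layer. A hedged standalone sketch of the shapes involved (hypothetical sizes, not the Colvars types):

```cpp
#include <cassert>
#include <cstddef>
#include <vector>

int main() {
  std::vector<std::vector<double>> outputs;             // like m_layers_output
  std::vector<std::vector<std::vector<double>>> grads;  // like m_grads_tmp

  const std::size_t out_size = 4, in_size = 3;          // hypothetical layer sizes

  outputs.emplace_back(out_size);  // std::vector<double>(4): four zeros
  grads.emplace_back(out_size, std::vector<double>(in_size, 0));  // 4x3 zero matrix

  assert(outputs.back().size() == out_size);
  assert(grads.back().size() == out_size);
  assert(grads.back().front().size() == in_size);
}
```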
src/colvaratoms.cpp (2 changes: 1 addition & 1 deletion)
@@ -213,7 +213,7 @@ int cvm::atom_group::init_dependencies() {
   // default as unavailable, not enabled
   feature_states.reserve(f_ag_ntot);
   for (i = feature_states.size(); i < colvardeps::f_ag_ntot; i++) {
-    feature_states.push_back(feature_state(false, false));
+    feature_states.emplace_back(false, false);
   }

   // Features that are implemented (or not) by all atom groups
src/colvarbias.cpp (4 changes: 2 additions & 2 deletions)
@@ -248,7 +248,7 @@ int colvarbias::init_dependencies() {
   // Initialize feature_states for each instance
   feature_states.reserve(f_cvb_ntot);
   for (i = feature_states.size(); i < f_cvb_ntot; i++) {
-    feature_states.push_back(feature_state(true, false));
+    feature_states.emplace_back(true, false);
     // Most features are available, so we set them so
     // and list exceptions below
   }
@@ -352,7 +352,7 @@ int colvarbias::add_colvar(std::string const &cv_name)
   // although possibly not at all timesteps
   add_child(cv);

-  colvar_forces.push_back(colvarvalue());
+  colvar_forces.emplace_back();
   colvar_forces.back().type(cv->value()); // make sure each force is initialized to zero
   colvar_forces.back().is_derivative(); // colvar constraints are not applied to the force
   colvar_forces.back().reset();
src/colvarbias_abf.cpp (4 changes: 2 additions & 2 deletions)
@@ -925,7 +925,7 @@ template <typename OST> OST & colvarbias_abf::write_state_data_template_(OST &os
 {
   auto flags = os.flags();

-  os.setf(std::ios::fmtflags(std::ios::dec), std::ios::floatfield); // default floating-point format
+  os.unsetf(std::ios::floatfield); // default floating-point format

   write_state_data_key(os, "samples");
   samples->write_raw(os, 8);
@@ -941,7 +941,7 @@ template <typename OST> OST & colvarbias_abf::write_state_data_template_(OST &os
   }

   if (b_CZAR_estimator) {
-    os.setf(std::ios::fmtflags(std::ios::dec), std::ios::floatfield); // default floating-point format
+    os.unsetf(std::ios::floatfield); // default floating-point format
     write_state_data_key(os, "z_samples");
     z_samples->write_raw(os, 8);
     write_state_data_key(os, "z_gradient");
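The `setf`/`unsetf` change is behavior-preserving: `std::ios::dec` shares no bits with `std::ios::floatfield`, so the old call `os.setf(std::ios::fmtflags(std::ios::dec), std::ios::floatfield)` only cleared the `floatfield` bits (restoring the default format, in which neither `fixed` nor `scientific` is set); `os.unsetf(std::ios::floatfield)` says the same thing directly. A small standalone sketch of the equivalence:

```cpp
#include <iostream>
#include <sstream>

int main() {
  std::ostringstream a, b;
  a.setf(std::ios::fixed, std::ios::floatfield);  // force fixed notation first
  b.setf(std::ios::fixed, std::ios::floatfield);

  // Old idiom: dec has no floatfield bits, so this merely clears the mask.
  a.setf(std::ios::fmtflags(std::ios::dec), std::ios::floatfield);
  // New spelling of the same operation:
  b.unsetf(std::ios::floatfield);

  a << 0.0001;
  b << 0.0001;
  std::cout << a.str() << ' ' << b.str() << '\n';  // both print "0.0001"
}
```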
src/colvarbias_histogram.cpp (2 changes: 1 addition & 1 deletion)
@@ -216,7 +216,7 @@ cvm::memory_stream & colvarbias_histogram::read_state_data(cvm::memory_stream& i
 std::ostream & colvarbias_histogram::write_state_data(std::ostream& os)
 {
   std::ios::fmtflags flags(os.flags());
-  os.setf(std::ios::fmtflags(std::ios::dec), std::ios::floatfield);
+  os.unsetf(std::ios::floatfield);
   write_state_data_key(os, "grid");
   grid->write_raw(os, 8);
   os.flags(flags);
src/colvarbias_histogram_reweight_amd.cpp (2 changes: 1 addition & 1 deletion)
@@ -318,7 +318,7 @@ void colvarbias_reweightaMD::compute_cumulant_expansion_factor(
 template <typename OST> OST & colvarbias_reweightaMD::write_state_data_template_(OST& os)
 {
   std::ios::fmtflags flags(os.flags());
-  os.setf(std::ios::fmtflags(std::ios::dec), std::ios::floatfield);
+  os.unsetf(std::ios::floatfield);
   write_state_data_key(os, "grid");
   grid->write_raw(os, 8);
   write_state_data_key(os, "grid_count");
src/colvarbias_meta.cpp (2 changes: 1 addition & 1 deletion)
@@ -1705,7 +1705,7 @@ template <typename IST> IST &colvarbias_meta::read_hill_template_(IST &is)
   }

   hill_iter const hills_end = hills.end();
-  hills.push_back(hill(h_it, h_weight, h_centers, h_sigmas, h_replica));
+  hills.emplace_back(h_it, h_weight, h_centers, h_sigmas, h_replica);
   if (new_hills_begin == hills_end) {
     // if new_hills_begin is unset, set it for the first time
     new_hills_begin = hills.end();
src/colvarcomp.cpp (2 changes: 1 addition & 1 deletion)
@@ -308,7 +308,7 @@ int colvar::cvc::init_dependencies() {
   feature_states.reserve(f_cvc_ntot);
   for (i = feature_states.size(); i < colvardeps::f_cvc_ntot; i++) {
     bool avail = is_dynamic(i) ? false : true;
-    feature_states.push_back(feature_state(avail, false));
+    feature_states.emplace_back(avail, false);
   }

   // Features that are implemented by all cvcs by default
src/colvarcomp_neuralnetwork.cpp (4 changes: 2 additions & 2 deletions)
@@ -77,14 +77,14 @@ int colvar::neuralNetwork::init(std::string const &conf)
         return cvm::error("Unknown activation function name: \"" + function_name + "\".\n",
                           COLVARS_INPUT_ERROR);
       }
-      activation_functions.push_back(std::make_pair(false, function_name));
+      activation_functions.emplace_back(false, function_name);
       cvm::log(std::string{"The activation function for layer["} + cvm::to_str(num_activation_functions + 1) + std::string{"] is "} + function_name + '\n');
       ++num_activation_functions;
 #ifdef LEPTON
     } else if (key_lookup(conf, lookup_key_custom.c_str())) {
       std::string function_expression;
       get_keyval(conf, lookup_key_custom.c_str(), function_expression, std::string(""));
-      activation_functions.push_back(std::make_pair(true, function_expression));
+      activation_functions.emplace_back(true, function_expression);
       cvm::log(std::string{"The custom activation function for layer["} + cvm::to_str(num_activation_functions + 1) + std::string{"] is "} + function_expression + '\n');
       ++num_activation_functions;
 #endif
src/colvardeps.cpp (6 changes: 3 additions & 3 deletions)
@@ -421,22 +421,22 @@ void colvardeps::require_feature_children(int f, int g) {


 void colvardeps::require_feature_alt(int f, int g, int h) {
-  features()[f]->requires_alt.push_back(std::vector<int>(2));
+  features()[f]->requires_alt.emplace_back(2);
   features()[f]->requires_alt.back()[0] = g;
   features()[f]->requires_alt.back()[1] = h;
 }


 void colvardeps::require_feature_alt(int f, int g, int h, int i) {
-  features()[f]->requires_alt.push_back(std::vector<int>(3));
+  features()[f]->requires_alt.emplace_back(3);
   features()[f]->requires_alt.back()[0] = g;
   features()[f]->requires_alt.back()[1] = h;
   features()[f]->requires_alt.back()[2] = i;
 }


 void colvardeps::require_feature_alt(int f, int g, int h, int i, int j) {
-  features()[f]->requires_alt.push_back(std::vector<int>(4));
+  features()[f]->requires_alt.emplace_back(4);
   features()[f]->requires_alt.back()[0] = g;
   features()[f]->requires_alt.back()[1] = h;
   features()[f]->requires_alt.back()[2] = i;
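One subtlety worth flagging for `requires_alt.emplace_back(2)`: the forwarded argument is used with parentheses semantics, so this constructs `std::vector<int>(2)` (two zero-initialized slots, filled by the assignments that follow), not the one-element list `{2}` that brace-initialization would produce. A minimal sketch of the distinction:

```cpp
#include <cassert>
#include <vector>

int main() {
  std::vector<std::vector<int>> alts;

  alts.emplace_back(2);        // std::vector<int>(2): size 2, elements {0, 0}
  alts.back()[0] = 7;          // then assigned, as in require_feature_alt()
  alts.back()[1] = 8;

  std::vector<int> braced{2};  // by contrast: size 1, single element 2

  assert(alts.back().size() == 2 && alts.back()[1] == 8);
  assert(braced.size() == 1 && braced[0] == 2);
}
```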
src/colvargrid_integrate.cpp (2 changes: 1 addition & 1 deletion)
@@ -49,7 +49,7 @@ colvargrid_integrate::colvargrid_integrate(std::shared_ptr<colvar_grid_gradient>
   for (size_t i = 0; i < nd; i++ ) {
     if (!periodic[i]) nx[i]++;
     // Shift the grid by half the bin width (values at edges instead of center of bins)
-    lower_boundaries.push_back(gradients->lower_boundaries[i].real_value - 0.5 * widths[i]);
+    lower_boundaries.emplace_back(gradients->lower_boundaries[i].real_value - 0.5 * widths[i]);
   }

   setup(nx);
src/colvarmodule.cpp (2 changes: 1 addition & 1 deletion)
@@ -2125,7 +2125,7 @@ int cvm::read_index_file(char const *filename)
   if (!is) {
     return COLVARS_FILE_ERROR;
   } else {
-    index_file_names.push_back(std::string(filename));
+    index_file_names.emplace_back(filename);
   }

   while (is.good()) {
src/colvarproxy.cpp (14 changes: 7 additions & 7 deletions)
@@ -54,9 +54,9 @@ int colvarproxy_atoms::add_atom_slot(int atom_id)
   atoms_refcount.push_back(1);
   atoms_masses.push_back(1.0);
   atoms_charges.push_back(0.0);
-  atoms_positions.push_back(cvm::rvector(0.0, 0.0, 0.0));
-  atoms_total_forces.push_back(cvm::rvector(0.0, 0.0, 0.0));
-  atoms_new_colvar_forces.push_back(cvm::rvector(0.0, 0.0, 0.0));
+  atoms_positions.emplace_back(0.0, 0.0, 0.0);
+  atoms_total_forces.emplace_back(0.0, 0.0, 0.0);
+  atoms_new_colvar_forces.emplace_back(0.0, 0.0, 0.0);
   modified_atom_list_ = true;
   return (atoms_ids.size() - 1);
 }
@@ -181,9 +181,9 @@ int colvarproxy_atom_groups::add_atom_group_slot(int atom_group_id)
   atom_groups_refcount.push_back(1);
   atom_groups_masses.push_back(1.0);
   atom_groups_charges.push_back(0.0);
-  atom_groups_coms.push_back(cvm::rvector(0.0, 0.0, 0.0));
-  atom_groups_total_forces.push_back(cvm::rvector(0.0, 0.0, 0.0));
-  atom_groups_new_colvar_forces.push_back(cvm::rvector(0.0, 0.0, 0.0));
+  atom_groups_coms.emplace_back(0.0, 0.0, 0.0);
+  atom_groups_total_forces.emplace_back(0.0, 0.0, 0.0);
+  atom_groups_new_colvar_forces.emplace_back(0.0, 0.0, 0.0);
   return (atom_groups_ids.size() - 1);
 }

@@ -503,7 +503,7 @@ int colvarproxy::request_deletion()

 void colvarproxy::add_config(std::string const &cmd, std::string const &conf)
 {
-  reinterpret_cast<std::list<std::pair<std::string, std::string> > *>(config_queue_)->push_back(std::make_pair(cmd, conf));
+  reinterpret_cast<std::list<std::pair<std::string, std::string> > *>(config_queue_)->emplace_back(cmd, conf);
 }

src/colvars_memstream.h (13 changes: 8 additions & 5 deletions)
@@ -106,16 +106,19 @@ class cvm::memory_stream {
   inline memory_stream & seekg(size_t pos) { read_pos_ = pos; return *this; }

   /// Ignore formatting operators
-  inline void setf(decltype(std::ios::fmtflags(std::ios::unitbuf)), decltype(std::ios::floatfield)) {}
+  inline void setf(std::ios::fmtflags /* flags */, std::ios::fmtflags /* mask */) {}

   /// Ignore formatting operators
-  inline void setf(decltype(std::ios::fmtflags(std::ios::unitbuf))) {}
+  inline void setf(std::ios::fmtflags) {}

   /// Ignore formatting operators
-  inline void flags(decltype(std::ios::fmtflags(std::ios::unitbuf))) {}
+  inline void unsetf(std::ios::fmtflags) {}

-  /// Get the current formatting flags (throw a useless result because this stream is unformatted)
-  inline decltype(std::ios::fmtflags(std::ios::unitbuf)) flags() const { return std::ios::fmtflags(std::ios::unitbuf); }
+  /// Ignore formatting operators
+  inline void flags(std::ios::fmtflags) {}
+
+  /// Get the current formatting flags (i.e. none because this stream is unformatted)
+  inline std::ios::fmtflags flags() const { return std::ios::fmtflags{}; }

   /// Get the error code
   inline std::ios::iostate rdstate() const { return state_; }
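Context for this header: `cvm::memory_stream` acts as a drop-in, unformatted counterpart of `std::ostream` in templated writers such as `write_state_data_template_`, so every formatting call made on the text path needs a no-op twin on the binary path; adding `unsetf` and re-typing the signatures as plain `std::ios::fmtflags` keeps the two interfaces aligned. A hedged sketch of the duck-typing pattern (simplified stand-ins, not the actual Colvars classes):

```cpp
#include <iostream>
#include <vector>

// Minimal stand-in for cvm::memory_stream: same formatting surface, all no-ops.
struct binary_stream {
  std::vector<unsigned char> buf;
  void setf(std::ios::fmtflags, std::ios::fmtflags) {}
  void setf(std::ios::fmtflags) {}
  void unsetf(std::ios::fmtflags) {}
  void flags(std::ios::fmtflags) {}
  std::ios::fmtflags flags() const { return std::ios::fmtflags{}; }
  binary_stream &operator<<(double x) {  // raw bytes instead of text
    const unsigned char *p = reinterpret_cast<const unsigned char *>(&x);
    buf.insert(buf.end(), p, p + sizeof(x));
    return *this;
  }
};

// One writer body serves both stream types, as the templated writers do.
template <typename OST> OST &write_value(OST &os, double v) {
  auto saved = os.flags();
  os.unsetf(std::ios::floatfield);  // text: default float format; binary: no-op
  os << v;
  os.flags(saved);
  return os;
}

int main() {
  binary_stream bs;
  write_value(std::cout, 0.25) << '\n';  // formatted text output
  write_value(bs, 0.25);                 // raw 8-byte output
  std::cout << bs.buf.size() << " bytes\n";
}
```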
src/colvarscript.cpp (2 changes: 1 addition & 1 deletion)
@@ -481,7 +481,7 @@ std::vector<std::string> colvarscript::obj_to_str_vector(unsigned char *obj)
                  str+"\n", COLVARS_INPUT_ERROR);
       break;
     }
-    new_result.push_back(std::string(""));
+    new_result.emplace_back("");
     while (str[i] != '\"') {
       new_result.back().append(1, str[i]);
       if (i >= str.length()) {