Commit 0ca1aa19 authored by Erik Strand

Template gradient descent on log class

As opposed to inheriting from one that's selected by compile-time
definitions. This makes it possible to compile instrumented and
non-instrumented versions side by side.
parent 1ec31878
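The pattern this commit adopts is easiest to see in isolation. Below is a minimal, self-contained sketch (simplified double-valued types and hypothetical names like Minimizer, LogNothing, and LogEverything, not the repository's actual classes) of an optimizer that inherits from a log policy supplied as a template parameter:

// Sketch only: the optimizer inherits from a Log policy chosen per
// instantiation, so instrumented and non-instrumented optimizers are
// distinct types that can coexist in a single translation unit.
#include <iostream>
#include <utility>
#include <vector>

// No-op policy: every hook compiles away.
struct LogNothing {
    void push_back(double, double) {}
};

// Recording policy: stores every sample for later serialization.
struct LogEverything {
    std::vector<std::pair<double, double>> samples;
    void push_back(double x, double fx) { samples.emplace_back(x, fx); }
};

template <typename Log = LogNothing>
class Minimizer : public Log {
public:
    double optimize(double x) {
        for (int i = 0; i < 10; ++i) {
            double fx = x * x;      // toy objective f(x) = x^2
            Log::push_back(x, fx);  // resolved at compile time; no-op for LogNothing
            x -= 0.25 * 2.0 * x;    // fixed-step gradient descent, gradient = 2x
        }
        return x;
    }
};

int main() {
    Minimizer<> fast;                 // non-instrumented
    Minimizer<LogEverything> logged;  // instrumented, side by side
    fast.optimize(4.0);
    logged.optimize(4.0);
    std::cout << "recorded " << logged.samples.size() << " samples\n";
}

Because the log is an ordinary template parameter with a no-op default, the two optimizers live in one binary, and the uninstrumented one pays no size or call overhead.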
 #ifndef OPTIMIZATION_OBJECTIVES_SAMPLES_VIS_H
 #define OPTIMIZATION_OBJECTIVES_SAMPLES_VIS_H
 #include "utils/vis_only.h"
 #ifdef VISUALIZE
 #include "samples.h"
 #include "utils/eigen_json.h"
 namespace optimization {
@@ -20,4 +19,3 @@ void to_json(nlohmann::json& j, GradientSample<Vector> const& sample) {
 }
 #endif
 #endif
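This header relies on the VIS_ONLY_METHOD macro from utils/vis_only.h, whose definition the diff doesn't show. A plausible form of the idiom, purely an assumption for illustration: the macro expands to an empty inline body in normal builds and to nothing in instrumented builds, where an out-of-line definition takes over.

#ifdef VISUALIZE
#define VIS_ONLY_METHOD      // declaration gets a real out-of-line definition
#else
#define VIS_ONLY_METHOD {}   // method collapses to an empty inline body
#endif

struct Sampler {
    void record(double x) VIS_ONLY_METHOD;
};

#ifdef VISUALIZE
void Sampler::record(double x) { /* store x for the visualization script */ }
#endif

int main() {
    Sampler s;
    s.record(1.0);  // no-op unless built with -DVISUALIZE
}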
 #ifndef OPTIMIZATION_GRADIENT_DESCENT_H
 #define OPTIMIZATION_GRADIENT_DESCENT_H
-#include "gradient_descent_log.h"
+#include "logs/nothing.h"
 #include <iostream>
 namespace optimization {
 //--------------------------------------------------------------------------------------------------
-template <int32_t N>
-class GradientDescent : public GradientDescentLog<N> {
+template <int32_t N, typename Log = GradientDescentLogNothing>
+class GradientDescent : public Log {
 public:
     GradientDescent() {}
     GradientDescent(Scalar learning_rate, uint32_t me, Scalar gt)
@@ -29,8 +29,8 @@ public:
         VectorNs<N> gradient;
         gradient.resize(point.size());
         objective.eval(point, value, gradient);
-        GradientDescentLog<N>::initialize(objective);
-        GradientDescentLog<N>::push_back(point, value, gradient);
+        Log::initialize(objective);
+        Log::push_back(point, value, gradient);
         for (n_evaluations_ = 1; n_evaluations_ < max_evaluations_; ++n_evaluations_) {
             if (gradient.norm() <= gradient_threshold_) {
@@ -41,7 +41,7 @@ public:
             point -= learning_rate_ * gradient;
             objective.eval(point, value, gradient);
-            GradientDescentLog<N>::push_back(point, value, gradient);
+            Log::push_back(point, value, gradient);
         }
         return point;
......
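One detail of the templated optimizer above deserves a note: the logging calls are written Log::initialize(...) and Log::push_back(...) rather than unqualified. Inside a class template, members of a dependent base class are invisible to unqualified name lookup, so the calls must be qualified or go through this->. A standalone illustration with hypothetical names:

// IntLog and Derived are illustrative only; they show the lookup rule,
// not the repository's classes.
struct IntLog {
    void push_back(int) {}
};

template <typename Base>
struct Derived : Base {
    void f() {
        // push_back(1);     // error: not found by unqualified lookup
        Base::push_back(1);  // OK: qualified; also suppresses virtual dispatch
        this->push_back(1);  // OK: lookup deferred to instantiation
    }
};

int main() {
    Derived<IntLog> d;
    d.f();
}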
-#ifndef OPTIMIZATION_GRADIENT_DESCENT_LOG_H
-#define OPTIMIZATION_GRADIENT_DESCENT_LOG_H
+#ifndef OPTIMIZATION_GRADIENT_DESCENT_LOGS_EVERYTHING_H
+#define OPTIMIZATION_GRADIENT_DESCENT_LOGS_EVERYTHING_H
 #include "utils/vector.h"
 #include "utils/vis_only.h"
@@ -17,8 +17,7 @@ namespace optimization {
 // This is used as a base class rather than a member so that the empty base class optimization can
 // be applied (the member would take up space even if it is an empty class).
 template <int32_t N>
-struct GradientDescentLog {
-    void reserve(uint32_t n) VIS_ONLY_METHOD;
+struct GradientDescentLogEverything {
     template <typename Objective>
     void initialize(Objective const&) VIS_ONLY_METHOD;
     void push_back(
@@ -36,22 +35,16 @@ struct GradientDescentLog {
 #ifdef VISUALIZE
 //..................................................................................................
-template <int32_t N>
-void GradientDescentLog<N>::reserve(uint32_t n) {
-    samples.reserve(n);
-}
-//..................................................................................................
 template <int32_t N>
 template <typename Objective>
-void GradientDescentLog<N>::initialize(Objective const&) {
+void GradientDescentLogEverything<N>::initialize(Objective const&) {
     objective_name = Objective::name;
 }
 //..................................................................................................
 template <int32_t N>
-void GradientDescentLog<N>::push_back(
+void GradientDescentLogEverything<N>::push_back(
     VectorNs<N> const& point,
     Scalar value,
     VectorNs<N> const& gradient
@@ -61,7 +54,7 @@ void GradientDescentLog<N>::push_back(
 //--------------------------------------------------------------------------------------------------
 template <int32_t N>
-void to_json(nlohmann::json& j, GradientDescentLog<N> const& log) {
+void to_json(nlohmann::json& j, GradientDescentLogEverything<N> const& log) {
     j = nlohmann::json{
         {"algorithm", "gradient descent"},
         {"objective", log.objective_name},
......
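The to_json overload above follows nlohmann::json's ADL-based serialization protocol: a free function to_json(json&, T const&) in T's namespace makes T convertible to json. A reduced sketch (DemoLog and its field are illustrative stand-ins, not the real GradientDescentLogEverything):

#include <iostream>
#include <string>
#include <nlohmann/json.hpp>

struct DemoLog {
    std::string objective_name = "rosenbrock";
};

// Found by argument-dependent lookup when a DemoLog is assigned to a json.
void to_json(nlohmann::json& j, DemoLog const& log) {
    j = nlohmann::json{
        {"algorithm", "gradient descent"},
        {"objective", log.objective_name}
    };
}

int main() {
    nlohmann::json j = DemoLog{};  // implicit conversion via to_json
    std::cout << j.dump(2) << '\n';
}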
@@ -2,14 +2,16 @@
 #define OPTIMIZATION_GRADIENT_DESCENT_VIS_H
 #include <iostream>
-#include "gradient_descent_log.h"
+#include "everything.h"
 namespace optimization {
 //--------------------------------------------------------------------------------------------------
+// Provides a way to serialize a subset of an everything log that is compatible with the
+// visualization script.
 template <int32_t N>
 struct GradientDescentVis {
-    GradientDescentLog<N> const& log;
+    GradientDescentLogEverything<N> const& log;
 };
 //..................................................................................................
......
+#ifndef OPTIMIZATION_GRADIENT_DESCENT_LOGS_NOTHING_H
+#define OPTIMIZATION_GRADIENT_DESCENT_LOGS_NOTHING_H
+#include "utils/vector.h"
+namespace optimization {
+//--------------------------------------------------------------------------------------------------
+struct GradientDescentLogNothing {
+    template <typename Objective>
+    void initialize(Objective const&) {}
+    template <int32_t N>
+    void push_back(VectorNs<N> const& point, Scalar value, VectorNs<N> const& gradient) {}
+};
+}
+#endif
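GradientDescentLogNothing is an empty class, which is exactly why the comment in the everything header about the empty base class optimization matters: inherited as a base, the no-op log adds nothing to the optimizer's footprint, whereas as a member it would cost at least one byte plus alignment padding. A quick standalone check with hypothetical types:

#include <iostream>

struct EmptyLog {};

struct AsMember {
    EmptyLog log;  // an empty member still occupies storage
    double x;
};

struct AsBase : EmptyLog {
    double x;      // the empty base contributes no storage
};

int main() {
    std::cout << sizeof(AsMember) << '\n';  // typically 16
    std::cout << sizeof(AsBase) << '\n';    // typically 8
}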
@@ -2,10 +2,11 @@
 #include "gradient_descent.h"
 #include "objectives/paraboloid.h"
 #include "objectives/rosenbrock.h"
+#include "logs/everything.h"
 #include <iostream>
 #ifdef VISUALIZE
-#include "gradient_descent_vis.h"
+#include "logs/everything_vis.h"
 #include "utils/eigen_json.h"
 #include <fstream>
@@ -54,7 +55,8 @@ int main(int const argc, char const** argv) {
     Objective objective;
     objective.dim() = dim;
-    GradientDescent<-1> optimizer(learning_rate, max_evaluations, gradient_threshold);
+    using Log = GradientDescentLogEverything<-1>;
+    GradientDescent<-1, Log> optimizer(learning_rate, max_evaluations, gradient_threshold);
     VectorXs minimum = optimizer.optimize(objective, initial_point);
     std::cout << "n evaluations: " << optimizer.n_evaluations() << '\n';
     std::cout << "final point: " << minimum << '\n';
......