/*
 * NProblemInterfaceAD.hpp
 *
 *  Created on: Nov 30, 2012
 *      Author: kremer
 */

#ifndef NPROBLEMINTERFACEAD_HPP_
#define NPROBLEMINTERFACEAD_HPP_

//== COMPILE-TIME PACKAGE REQUIREMENTS ========================================
#include <CoMISo/Config/config.hh>
#if COMISO_ADOLC_AVAILABLE
#if COMISO_EIGEN3_AVAILABLE

//== INCLUDES =================================================================

#include <vector>

#include <boost/shared_array.hpp>

#include <adolc/adolc.h>
#include <adolc/adouble.h>
#include <adolc/drivers/drivers.h>
#include <adolc/sparse/sparsedrivers.h>
#include <adolc/taping.h>

#include <Eigen/Eigen>
#if !(EIGEN_VERSION_AT_LEAST(3,1,0))
#define EIGEN_YES_I_KNOW_SPARSE_MODULE_IS_NOT_STABLE_YET
#endif
#include <Eigen/Sparse>

#include <CoMISo/Config/CoMISoDefines.hh>

#include "TapeIDSingleton.hh"

#include "NProblemInterface.hh"

//== FORWARDDECLARATIONS ======================================================

//== NAMESPACES ===============================================================

namespace COMISO {

//== CLASS DEFINITION =========================================================

/** \class NProblemInterfaceAD NProblemInterfaceAD.hpp <ACG/.../NProblemInterfaceAD.hpp>

 Problem interface with automatic differentiation.

 Extends the problem interface with automatic differentiation via ADOL-C:
 the objective is implemented once in adouble arithmetic, and gradients as
 well as Hessians are obtained automatically from the recorded tape.
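
 A minimal usage sketch (hypothetical toy problem, all names are illustrative):
 derive from this class, pass the number of unknowns to the base constructor,
 and implement the three pure virtual methods below using adouble arithmetic
 so that ADOL-C can tape the evaluation.

 \code
 class SmallQuadraticProblem : public NProblemInterfaceAD
 {
 public:
   SmallQuadraticProblem() : NProblemInterfaceAD(2), solution_(2, 0.0) {}

   virtual void initial_x(double* _x) { _x[0] = 1.0; _x[1] = -2.0; }

   // objective written with adouble so that the evaluation can be taped
   virtual adouble evaluate(const adouble* _x) {
     return _x[0]*_x[0] + 2.0*_x[1]*_x[1];
   }

   virtual void store_result(const double* _x) {
     solution_.assign(_x, _x + 2);
   }

 private:
   std::vector<double> solution_;
 };
 \endcode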
 */
class COMISODLLEXPORT NProblemInterfaceAD : public NProblemInterface
{
public:

    // Sparse Matrix Type
#if EIGEN_VERSION_AT_LEAST(3,1,0)
    typedef Eigen::SparseMatrix<double,Eigen::ColMajor> SMatrixNP;
#else
    typedef Eigen::DynamicSparseMatrix<double,Eigen::ColMajor> SMatrixNP;
#endif

    /// Constructor taking the number of unknowns
    NProblemInterfaceAD(int _n_unknowns) :
    n_unknowns_(_n_unknowns),
    dense_hessian_(NULL),
    function_evaluated_(false),
    use_tape_(true),
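    // request a unique tape id from the TapeIDSingleton so that several AD problems can coexist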
    tape_(static_cast<short int>(TapeIDSingleton::Instance()->requestId())) {

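        // zero-initialize the tape statistics buffer (filled by tapestats() when ADOLC_STATS is defined)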
        for(size_t i = 0; i < 11; ++i) tape_stats_[i] = 0;
    }

    /// Destructor
    virtual ~NProblemInterfaceAD() {

        if(dense_hessian_ != NULL) {
            for(int i = 0; i < n_unknowns_; ++i) {
                delete[] dense_hessian_[i];
            }
            delete[] dense_hessian_;
        }

        TapeIDSingleton::Instance()->releaseId(static_cast<size_t>(tape_));
    }

    // ================================================
    // Override these three methods
    // ================================================

    virtual void initial_x(double* _x) = 0;

    virtual adouble evaluate(const adouble* _x) = 0;

    virtual void store_result(const double* _x) = 0;

    // ================================================
    // Optionally override these methods, too
    // ================================================

    /**
     * \brief Indicate whether the Hessian is sparse.
     * If so, it can be computed more efficiently
     * and with lower memory consumption.
     */
    virtual bool sparse_hessian() {
        return false;
    }

    // ================================================

    virtual int n_unknowns() {

        return n_unknowns_;
    }

    virtual double eval_f(const double* _x) {

        double y = 0.0;

        if(!function_evaluated_ || !use_tape_) {

            adouble y_d = 0.0;

            boost::shared_array<adouble> x_d = x_d_ptr();

            trace_on(tape_); // Start taping

            // Mark the unknowns as independent variables and copy their current values
            for(int i = 0; i < n_unknowns_; ++i) {
                x_d[i] <<= _x[i];
            }

            // Call virtual function to compute
            // functional value
            y_d = evaluate(x_d.get());

            y_d >>= y;

            trace_off();

#ifdef ADOLC_STATS
            tapestats(tape_, tape_stats_);
            std::cout << "Status values for tape " << tape_ << std::endl;
            std::cout << "===============================================" << std::endl;
            std::cout << "Number of independent variables:\t" << tape_stats_[0] << std::endl;
            std::cout << "Number of dependent variables:\t\t" << tape_stats_[1] << std::endl;
            std::cout << "Max. number of live active variables:\t" << tape_stats_[2] << std::endl;
            std::cout << "Size of value stack:\t\t\t" << tape_stats_[3] << std::endl;
            std::cout << "Buffer size:\t\t\t\t" << tape_stats_[4] << std::endl;
            std::cout << "Total number of operations recorded:\t" << tape_stats_[5] << std::endl;
            std::cout << "Other stats [6]:\t\t\t" << tape_stats_[6] << std::endl;
            std::cout << "Other stats [7]:\t\t\t" << tape_stats_[7] << std::endl;
            std::cout << "Other stats [8]:\t\t\t" << tape_stats_[8] << std::endl;
            std::cout << "Other stats [9]:\t\t\t" << tape_stats_[9] << std::endl;
            std::cout << "Other stats [10]:\t\t\t" << tape_stats_[10] << std::endl;
            std::cout << "===============================================" << std::endl;
#endif

            function_evaluated_ = true;

        } else {
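            // The tape has already been recorded: re-evaluate the functional by
            // replaying the tape with ADOL-C's zero-order scalar driver function()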

            double ay[1] = {0.0};

            int ec = function(tape_, 1, n_unknowns_, const_cast<double*>(_x), ay);

#ifdef ADOLC_RET_CODES
            std::cout << "Info: function() returned code " << ec << std::endl;
#endif

            y = ay[0];
        }

        return y;
    }

    virtual void eval_gradient(const double* _x, double* _g) {

        if(!function_evaluated_ || !use_tape_) {
            // Evaluate original functional
            eval_f(_x);
        }

        int ec = gradient(tape_, n_unknowns_, _x, _g);

        if(ec < 0) {
            // Retape function if return code indicates discontinuity
            function_evaluated_ = false;
#ifdef ADOLC_RET_CODES
            std::cout << __FUNCTION__ << " invokes retaping of function due to discontinuity! Return code: " << ec << std::endl;
#endif
            eval_f(_x);
            ec = gradient(tape_, n_unknowns_, _x, _g);
        }

#ifdef ADOLC_RET_CODES
        std::cout << "Info: gradient() returned code " << ec << std::endl;
#endif
    }

    virtual void eval_hessian(const double* _x, SMatrixNP& _H) {

        _H.resize(n_unknowns_, n_unknowns_);
        _H.setZero();

        if(!function_evaluated_ || !use_tape_) {
            // Evaluate original functional
            eval_f(_x);
        }

        if(sparse_hessian()) {

            int nz = 0;
            int opt[2] = {0, 0};

            unsigned int* r_ind_p = NULL;
            unsigned int* c_ind_p = NULL;
            double* val_p = NULL;
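            // sparse_hess() returns the Hessian in coordinate format and allocates
            // r_ind_p, c_ind_p and val_p itself (released again below)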

            int ec = sparse_hess(tape_, n_unknowns_, 0, _x, &nz, &r_ind_p, &c_ind_p, &val_p, opt);

            if(ec < 0) {
                // Retape function if return code indicates discontinuity
                function_evaluated_ = false;
#ifdef ADOLC_RET_CODES
                std::cout << __FUNCTION__ << " invokes retaping of function due to discontinuity! Return code: " << ec << std::endl;
#endif
                eval_f(_x);
                ec = sparse_hess(tape_, n_unknowns_, 0, _x, &nz, &r_ind_p, &c_ind_p, &val_p, opt);
            }

            assert(nz >= 0);
            assert(r_ind_p != NULL);
            assert(c_ind_p != NULL);
            assert(val_p != NULL);

#ifdef ADOLC_RET_CODES
            std::cout << "Info: sparse_hessian() returned code " << ec << std::endl;
#endif

            for(int i = 0; i < nz; ++i) {

                _H.coeffRef(r_ind_p[i], c_ind_p[i]) = val_p[i];
            }

            delete[] r_ind_p;
            delete[] c_ind_p;
            delete[] val_p;

        } else {

            // Dense hessian data
            double** h_ptr = dense_hessian_ptr();

            int ec = hessian(tape_, n_unknowns_, const_cast<double*>(_x), h_ptr);

            if(ec < 0) {
                // Retape function if return code indicates discontinuity
                function_evaluated_ = false;
#ifdef ADOLC_RET_CODES
                std::cout << __FUNCTION__ << " invokes retaping of function due to discontinuity! Return code: " << ec << std::endl;
#endif
                eval_f(_x);
                ec = hessian(tape_, n_unknowns_, const_cast<double*>(_x), h_ptr);
            }

#ifdef ADOLC_RET_CODES
            std::cout << "Info: hessian() returned code " << ec << std::endl;
#endif

            for(int i = 0; i < n_unknowns_; ++i) {
                for(int j = 0; j <= i; ++j) {

                    _H.coeffRef(i, j) = h_ptr[i][j];

                    if(i != j) {
                        _H.coeffRef(j, i) = h_ptr[i][j];
                    }
                }
            }
        }
    }

    bool use_tape() const {
        return use_tape_;
    }

    /** \brief Use tape
     * Set this to false if the energy functional
     * is discontinuous (so that the operator tree
     * has to be re-established at each evaluation)
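     *
     * \code
     * // 'problem' is a hypothetical instance with a non-smooth energy
     * problem.use_tape(false); // forces re-taping at every evaluation
     * \endcode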
     */
    void use_tape(bool _b) {
        use_tape_ = _b;
    }

    /**
     * \brief Get the shared sample point vector's address
     *
     * The objective function class allocates the
     * memory for this vector on first request.
     * Get the pointer to this vector and inject it
     * into the constraint classes in order to
     * prevent them from allocating their own vectors
     * (which can be inefficient in terms of memory
     * consumption in case there are many constraints).
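     *
     * A hedged sketch (the constraint type and its constructor are hypothetical):
     * \code
     * boost::shared_array<adouble> x_d = problem.x_d_ptr();
     * MyConstraintAD constraint(x_d, problem.n_unknowns()); // reuses the problem's buffer
     * \endcode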
     */

    boost::shared_array<adouble> x_d_ptr() {
        if(x_d_.get() == NULL) {
            x_d_.reset(new adouble[n_unknowns_]);
        }
        return x_d_;
    }

    boost::shared_array<double> grad_ptr() {
        if(grad_.get() == NULL) {
            grad_.reset(new double[n_unknowns_]);
        }
        return grad_;
    }

    double** dense_hessian_ptr() {
        if(dense_hessian_ == NULL) {
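            // Allocate the lower triangle only: ADOL-C's hessian() driver fills h[i][j] for j <= i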
            dense_hessian_ = new double*[n_unknowns_];
            for(int i = 0; i < n_unknowns_; ++i) {
                dense_hessian_[i] = new double[i+1];
            }
        }
        return dense_hessian_;
    }

private:

    int n_unknowns_;

    // Shared data
    boost::shared_array<adouble> x_d_;

    // Gradient vector
    boost::shared_array<double> grad_;

    // Dense hessian
    double** dense_hessian_;

    size_t tape_stats_[11];

    bool function_evaluated_;
    bool use_tape_;

    const short int tape_;
};

//=============================================================================
} // namespace COMISO
//=============================================================================
#endif // COMISO_EIGEN3_AVAILABLE
//=============================================================================
#endif // COMISO_ADOLC_AVAILABLE
//=============================================================================
#endif /* NPROBLEMINTERFACEAD_HPP_ */