// suppress conversion warnings before other includes
# include <cppad/wno_conversion.hpp>
//
# include <FADBAD++/badiff.h>
# include <cppad/speed/det_by_lu.hpp>
# include <cppad/speed/uniform_01.hpp>
# include <cppad/utility/vector.hpp>

// list of possible options
# include <map>
# include <string>
extern std::map<std::string, bool> global_option;
bool link_det_lu(
size_t size ,
size_t repeat ,
CppAD::vector<double> &matrix ,
CppAD::vector<double> &gradient )
{
// speed test global option valuesif( global_option["onetape"] || global_option["atomic"] )
returnfalse;
if( global_option["memory"] || global_option["optimize"] )
returnfalse;
// -----------------------------------------------------// setup//// object for computing determinanttypedef fadbad::B<double> ADScalar;
typedef CppAD::vector<ADScalar> ADVector;
CppAD::det_by_lu<ADScalar> Det(size);
size_t i; // temporary index
size_t m = 1; // number of dependent variables
size_t n = size * size; // number of independent variables
ADScalar detA; // AD value of the determinant
ADVector A(n); // AD version of matrix// ------------------------------------------------------while(repeat--)
{ // get the next matrix
CppAD::uniform_01(n, matrix);
// set independent variable valuesfor(i = 0; i < n; i++)
A[i] = matrix[i];
// compute the determinant
detA = Det(A);
// create function object f : A -> detA
detA.diff(0, (unsigned int) m); // index 0 of m dependent variables// evaluate and return gradient using reverse modefor(i =0; i < n; i++)
gradient[i] = A[i].d(0); // partial detA w.r.t A[i]
}
// ---------------------------------------------------------returntrue;
}