/usr/include/trilinos/NLPInterfacePack_NLPObjGrad.hpp is in libtrilinos-dev 10.4.0.dfsg-1ubuntu2.
This file is owned by root:root, with mode 0o644.
The actual contents of the file can be viewed below.
// @HEADER
// ***********************************************************************
//
// Moocho: Multi-functional Object-Oriented arCHitecture for Optimization
// Copyright (2003) Sandia Corporation
//
// Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
// license for use of this work by or on behalf of the U.S. Government.
//
// This library is free software; you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as
// published by the Free Software Foundation; either version 2.1 of the
// License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful, but
// WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
// USA
// Questions? Contact Roscoe A. Bartlett (rabartl@sandia.gov)
//
// ***********************************************************************
// @HEADER
#ifndef NLP_OBJ_GRADIENT_H
#define NLP_OBJ_GRADIENT_H
#include "NLPInterfacePack_NLP.hpp"
namespace NLPInterfacePack {
/** \brief %NLP interface class that adds gradient information for the objective function {abstract}.
*
* <b>Overview:</b>
*
* This class adds the ability to compute the gradient of the objective function
* \c Gf(x) to the basic information given in the \c NLP interface class. Note that
* \c Gf is in the vector space \c space_x().
*
* <b>Client Usage:</b>
*
* As with the <tt>NLP</tt> base interface, the <tt>initialize()</tt> method must be called before
* the %NLP object can be used. The method <tt>set_Gf()</tt> is used to set a pointer to a vector
* to update when the gradient of the objective \c Gf is computed when <tt>calc_Gf()</tt> is called.
*
* The number of evaluations of \c Gf using <tt>calc_Gf()</tt> is returned by <tt>num_Gf_evals()</tt>.
*
* <b>Subclass developer's notes:</b>
*
* <A NAME="must_override"></A>
* In addition to the methods that must be overridden by the <tt>NLP</tt> interface
* (<A HREF="classNLPInterfacePack_1_1NLP.html#must_override">see</A>) the following methods
* must also be overridden: <tt>imp_calc_Gf()</tt>.
*
* <A NAME="should_override"></A>
* In addition to the methods that should be overridden from <tt>%NLP</tt> by most subclasses
* (<A HREF="classNLPInterfacePack_1_1NLP.html#should_override">see</A>), the following
* additional methods should be overridden: \c initialize().
*
* The following methods should never have to be overridden by most subclasses except in some very
* strange situations: \c set_Gf(), \c get_Gf(), \c Gf(), \c num_Gf_evals().
*/
class NLPObjGrad : virtual public NLP {
public:
/** @name Constructors */
//@{
/// Initialize to no reference set to calculation quantities
NLPObjGrad();
//@}
/** @name NLP initialization */
//@{
/** \brief Initialize the NLP for its first use.
*
* This function implementation should be called by subclass implementations
* in order to reset counts for \c f(x), \c c(x), \c h(x) and \c Gf(x) evaluations.
* This implementation calls <tt>this->NLP::initialize()</tt>
*
* Postconditions:<ul>
* <li> See <tt>NLP::initialize()</tt>
* <li> <tt>this->num_Gf_evals() == 0</tt>
* </ul>
*/
void initialize(bool test_setup);
//@}
/** @name Information */
//@{
/** \brief Determine if the objective gradient is supported or not.
*
* The default implementation returns <tt>true</tt>.
*/
virtual bool supports_Gf() const;
/** \brief Determine if the objective gradient product is supported or not.
*
* The default implementation returns <tt>true</tt>.
*/
virtual bool supports_Gf_prod() const;
//@}
/** @name <<std aggr>> members for the gradient of the objective function Gf(x) */
//@{
/** \brief Set a pointer to a vector to be updated when <tt>this->calc_Gf()</tt> is called.
*
* @param Gf [in] Pointer to gradient vector. May be \c NULL.
*
* Preconditions:<ul>
* <li> <tt>this->is_initialized() == true</tt> (throw <tt>NotInitialized</tt>)
* <li> <tt>this->supports_Gf()</tt>
* <li> [<tt>Gf != NULL</tt>] <tt>Gf->space().is_compatible(*this->space_x()) == true</tt>
* (throw <tt>VectorSpace::IncompatibleVectorSpaces</tt>)
* </ul>
*
* Postconditions:<ul>
* <li> <tt>this->get_Gf() == Gf</tt>
* </ul>
*/
virtual void set_Gf(VectorMutable* Gf);
/** \brief Return pointer passed to <tt>this->set_Gf()</tt>.
*
* Preconditions:<ul>
* <li> <tt>this->is_initialized() == true</tt> (throw <tt>NotInitialized</tt>)
* <li> <tt>this->supports_Gf()</tt>
* </ul>
*/
virtual VectorMutable* get_Gf();
/** \brief Returns non-<tt>const</tt> <tt>*this->get_Gf()</tt>.
*
* Preconditions:<ul>
* <li> <tt>this->is_initialized() == true</tt> (throw <tt>NotInitialized</tt>)
* <li> <tt>this->supports_Gf()</tt>
* <li> <tt>this->get_Gf() != NULL</tt> (throw <tt>NoRefSet</tt>)
* </ul>
*/
virtual VectorMutable& Gf();
/** \brief Returns <tt>const</tt> <tt>*this->get_Gf()</tt>.
*
* Preconditions:<ul>
* <li> <tt>this->is_initialized() == true</tt> (throw <tt>NotInitialized</tt>)
* <li> <tt>this->supports_Gf()</tt>
* <li> <tt>this->get_Gf() != NULL</tt> (throw <tt>NoRefSet</tt>)
* </ul>
*/
virtual const Vector& Gf() const;
//@}
/** @name Unset calculation quantities */
//@{
/** \brief Call to unset all storage quantities (both in this class and all subclasses).
*
* Preconditions:<ul>
* <li> <tt>this->is_initialized() == true</tt> (throw <tt>NotInitialized</tt>)
* </ul>
*
* Postconditions:<ul>
* <li> See <tt>NLP::unset_quantities()</tt>
* <li> <tt>this->get_Gf() == NULL</tt>
* </ul>
*
* This method must be called by all subclasses that override it.
*/
void unset_quantities();
//@}
/** @name Calculation Members */
//@{
/** \brief Update the vector for \c Gf at the point \c x and put it in the stored reference.
*
* @param x [in] Point at which to calculate the gradient of the objective <tt>Gf(x)</tt>.
* @param newx [in] (default \c true) If \c false, the values in \c x are assumed to be the same as
* the last call to a <tt>this->calc_*(x,newx)</tt> member.
* If \c true, the values in \c x are assumed to not be the same as the last call to a
* <tt>this->calc_*(x,newx)</tt> member.
*
* Preconditions:<ul>
* <li> <tt>this->is_initialized() == true</tt> (throw <tt>NotInitialized</tt>)
* <li> <tt>this->supports_Gf()</tt>
* <li> <tt>x.space().is_compatible(*this->space_x()) == true</tt> (throw <tt>VectorSpace::IncompatibleVectorSpaces</tt>)
* <li> <tt>this->get_Gf() != NULL</tt> (throw <tt>NoRefSet</tt>)
* </ul>
*
* Postconditions:<ul>
* <li> <tt>this->Gf()</tt> is updated to \c Gf(x)
* </ul>
*
* If <tt>set_multi_calc(true)</tt> was called then referenced storage for \c f and/or \c c
* may also be updated but are not guaranteed to be. But no other quantities from possible subclasses are allowed
* to be updated as a side effect (i.e. no higher order derivatives).
*/
virtual void calc_Gf(const Vector& x, bool newx = true) const;
/** \brief Calculate the inner product <tt>Gf(x)'*d</tt> at the point <tt>x</tt> and put it in the stored reference.
*
* @param x [in] Base point
* @param d [in] Direction to compute the product along.
* @param newx [in] (default \c true) If \c false, the values in \c x are assumed to be the same as
* the last call to a <tt>this->calc_*(x,newx)</tt> member.
* If \c true, the values in \c x are assumed to not be the same as the last call to a
* <tt>this->calc_*(x,newx)</tt> member.
*
* Preconditions:<ul>
* <li> <tt>this->is_initialized() == true</tt> (throw <tt>NotInitialized</tt>)
* <li> <tt>this->supports_Gf()</tt>
* <li> <tt>x.space().is_compatible(*this->space_x()) == true</tt> (throw <tt>VectorSpace::IncompatibleVectorSpaces</tt>)
* </ul>
*
* Postconditions:<ul>
* <li> <tt>return</tt> gives the desired product.
* </ul>
*
* If <tt>set_multi_calc(true)</tt> was called then referenced storage for \c f and/or \c c
* may also be updated but are not guaranteed to be. But no other quantities from possible subclasses are allowed
* to be updated as a side effect (i.e. no higher order derivatives).
*/
virtual value_type calc_Gf_prod(const Vector& x, const Vector& d, bool newx = true) const;
//@}
//@}
/** @name Function evaluation counts. */
//@{
/** \brief Objective gradient evaluations count.
*
* This function can be called to find out how many evaluations
* \c this->calc_Gf() the client requested since \c this->initialize() was called.
*/
virtual size_type num_Gf_evals() const;
//@}
/** @name Protected types */
//@{
/** \brief Struct for gradient (objective), objective and constraints (pointers)
*/
struct ObjGradInfo {
/** \brief Default constructor: all pointers initialized to NULL. */
ObjGradInfo()
: Gf(NULL), f(NULL), c(NULL)
{}
/** \brief Construct from a gradient pointer plus the zero-order (f,c) pointers. */
ObjGradInfo( VectorMutable* Gf_in, const ZeroOrderInfo& first_order_info_in )
: Gf(Gf_in), f(first_order_info_in.f), c(first_order_info_in.c)
{}
/// Pointer to gradient of objective function <tt>Gf</tt> (may be NULL if not set)
VectorMutable* Gf;
/// Pointer to objective function <tt>f</tt> (may be NULL if not set)
value_type* f;
/// Pointer to constraints residual <tt>c</tt> (may be NULL if not set)
VectorMutable* c;
}; // end struct ObjGradInfo
//@}
protected:
/// Return objective gradient and zero order information.
const ObjGradInfo obj_grad_info() const;
/** @name Protected methods to be overridden by subclasses */
//@{
/** \brief Overridden to compute Gf(x), and perhaps f(x) and c(x) (if multiple calculation = true).
*
* Preconditions:<ul>
* <li> <tt>x.space().is_compatible(*this->space_x())</tt> (throw <tt>IncompatibleType</tt>)
* <li> <tt>obj_grad_info.Gf != NULL</tt> (throw <tt>std::invalid_argument</tt>)
* </ul>
*
* Postconditions:<ul>
* <li> <tt>*obj_grad_info.Gf</tt> is updated to \a Gf(x).
* </ul>
*
* @param x [in] Unknown vector (size n).
* @param newx [in] (default \c true) If \c false, the values in \c x are assumed to be the same as
* the last call to a <tt>this->imp_calc_*(x,newx)</tt> member.
* If \c true, the values in \c x are assumed to not be the same as the last call to a
* <tt>this->imp_calc_*(x,newx)</tt> member.
* @param obj_grad_info
* [out] Pointers to \c f, \c c and \c Gf.
* On output <tt>*obj_grad_info.Gf</tt> is updated to \a Gf(x).
* Any of the other objects pointed to in
* \c obj_grad_info may be set if <tt>this->multi_calc() == true</tt> but are
* not guaranteed to be.
*/
virtual void imp_calc_Gf(const Vector& x, bool newx, const ObjGradInfo& obj_grad_info) const = 0;
//@}
private:
/// Client-set storage for the objective gradient Gf (NULL until set_Gf() is called)
mutable VectorMutable *Gf_;
/// Count of calc_Gf() evaluations since initialize() (reported by num_Gf_evals())
mutable size_type num_Gf_evals_;
}; // end class NLPObjGrad
// //////////////////
// Inline members
inline
const NLPObjGrad::ObjGradInfo NLPObjGrad::obj_grad_info() const
{
  // Bundle the stored objective-gradient pointer together with the
  // zero-order (f, c) pointers from the base NLP interface.
  const ZeroOrderInfo zero_order = this->zero_order_info();
  return ObjGradInfo(Gf_, zero_order);
}
} // end namespace NLPInterfacePack
#endif // NLP_OBJ_GRADIENT_H