#pragma once

#include <complex>
#include <map>
#include <tuple>
#include <numeric>
#include "teqp/types.hpp"
#include "teqp/exceptions.hpp"
#if defined(TEQP_MULTICOMPLEX_ENABLED)
#include "MultiComplex/MultiComplex.hpp"
#endif

// autodiff include
#include <autodiff/forward/dual.hpp>
#include <autodiff/forward/dual/eigen.hpp>
#include <autodiff/forward/real.hpp>
#include <autodiff/forward/real/eigen.hpp>

using namespace autodiff;

namespace teqp {

/***
* \brief Given a function, use complex step derivatives to calculate the derivative with
* respect to the first variable, which here is temperature
*/
template <typename TType, typename ContainerType, typename FuncType>
typename ContainerType::value_type derivT(const FuncType& f, TType T, const ContainerType& rho) {
    double h = 1e-100;
    return f(std::complex<TType>(T, h), rho).imag() / h;
}
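
// Usage sketch (hypothetical, not part of the library): any callable f(T, rhovec) that is
// generic in its temperature type (so that std::complex<double> can pass through) can be
// differentiated with respect to T, e.g.
//
//     auto f = [](const auto& T_, const Eigen::ArrayXd& rhovec_) { return T_*rhovec_.sum(); };
//     Eigen::ArrayXd rhovec(2); rhovec << 3000.0, 1000.0;   // illustrative molar densities
//     double dfdT = derivT(f, 300.0, rhovec);               // complex-step estimate of df/dT (= rhovec.sum())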

#if defined(TEQP_MULTICOMPLEX_ENABLED)
/***
* \brief Given a function, use multicomplex derivatives to calculate the derivative with
* respect to the first variable, which here is temperature
*/
template <typename TType, typename ContainerType, typename FuncType>
typename ContainerType::value_type derivTmcx(const FuncType& f, TType T, const ContainerType& rho) {
    using fcn_t = std::function<mcx::MultiComplex<double>(const mcx::MultiComplex<double>&)>;
    fcn_t wrapper = [&rho, &f](const auto& T_) {return f(T_, rho); };
    auto ders = diff_mcx1(wrapper, T, 1);
    return ders[0];
}
#endif
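
// Usage sketch (hypothetical, analogous to the derivT example above): with
// TEQP_MULTICOMPLEX_ENABLED defined, the same callable and density vector can be
// differentiated with multicomplex arithmetic instead of a complex step:
//
//     double dfdT_mcx = derivTmcx(f, 300.0, rhovec);   // same f and rhovec as in the derivT sketch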

/***
* \brief Given a function, use complex step derivatives to calculate the derivative with respect
* to the given composition variable
*/
template <typename TType, typename ContainerType, typename FuncType, typename Integer>
typename ContainerType::value_type derivrhoi(const FuncType& f, TType T, const ContainerType& rho, Integer i) {
    double h = 1e-100;
    using comtype = std::complex<typename ContainerType::value_type>;
    Eigen::ArrayX<comtype> rhocom(rho.size());
    for (auto j = 0; j < rho.size(); ++j) {
        rhocom[j] = comtype(rho[j], 0.0);
    }
    rhocom[i] = comtype(rho[i], h);
    return f(T, rhocom).imag() / h;
}
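
// Usage sketch (hypothetical): the same complex-step idea applied to one density at a time.
// The callable must be generic in its density-array type, because a complex-valued copy of
// the density vector is constructed internally:
//
//     auto f = [](double T_, const auto& rhovec_) { return rhovec_.sum()*T_; };
//     Eigen::ArrayXd rhovec(2); rhovec << 3000.0, 1000.0;
//     double dfdrho1 = derivrhoi(f, 300.0, rhovec, 1);   // derivative with respect to rhovec[1] (= 300.0)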

/// Helper function for build_duplicated_tuple
/// See example here for the general concept https://en.cppreference.com/w/cpp/utility/integer_sequence
template<typename T, size_t ... I>
auto build_duplicated_tuple_impl(const T& val, std::index_sequence<I...>) {
    return std::make_tuple((static_cast<void>(I), val)...);  // The comma operator (a,b) evaluates the first argument and discards it, and keeps the second argument 
}

/// A function to generate a tuple of N repeated copies of argument val at compile-time
template<int N, typename T>
auto build_duplicated_tuple(const T& val) {
    return build_duplicated_tuple_impl(val, std::make_index_sequence<N>());
}
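
// For example (a minimal sketch), build_duplicated_tuple<3>(x) produces the same tuple as
// std::make_tuple(x, x, x), with the repetition count fixed at compile time:
//
//     double x = 3.14;
//     auto tup = build_duplicated_tuple<3>(x);   // std::tuple<double, double, double>
//     static_assert(std::tuple_size<decltype(tup)>::value == 3, "three copies expected");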

/// A class to help with the forwarding of arguments to wrt of autodiff. std::apply cannot be applied to
/// wrt directly because wrt uses perfect forwarding and the compiler cannot work out the argument types,
/// but by making a struct with operator(), the compiler can deduce the argument types
struct wrt_helper {
    template<typename... Args>
    auto operator()(Args&&... args) const
    {
        return Wrt<Args&&...>{ std::forward_as_tuple(std::forward<Args>(args)...) };
    }
};
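
// Usage sketch (illustrative names; mirrors how this helper is combined with
// build_duplicated_tuple and autodiff later in this header):
//
//     auto f = [](const auto& x_, const auto& y_) { return x_*x_*y_; };
//     autodiff::HigherOrderDual<2, double> x = 1.0, y = 2.0;
//     auto wrts = std::tuple_cat(build_duplicated_tuple<1>(std::ref(x)), build_duplicated_tuple<1>(std::ref(y)));
//     // std::apply unpacks the tuple of references into wrt_helper::operator(), which assembles the Wrt<...> bundle
//     auto ders = derivatives(f, std::apply(wrt_helper(), wrts), at(x, y));
//     // the last entry of ders is the mixed derivative d^2f/dxdy, here 2*x = 2.0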

enum class AlphaWrapperOption {residual, idealgas};
/**
* \brief This class is used to wrap a model that exposes the generic 
* functions alphar, alphaig, etc., and allow the desired member function to be
* called at runtime via perfect forwarding
* 
* This class is needed because conventional binding methods (e.g., std::bind)
* require the argument types to be known, and they are not known in this case
* so we give the hard work of managing the argument types to the compiler
*/
template<AlphaWrapperOption o, class Model>
struct AlphaCallWrapper {
    const Model& m_model;
    AlphaCallWrapper(const Model& model) : m_model(model) {};

    template <typename ... Args>
    auto alpha(const Args& ... args) const {
        if constexpr (o == AlphaWrapperOption::residual) {
            // The alphar method is REQUIRED to be implemented by all
            // models, so can just call it via perfect forwarding
            return m_model.alphar(std::forward<const Args>(args)...);
        }
        else {
            //throw teqp::InvalidArgument("Missing implementation for alphaig");
            return m_model.alphaig(std::forward<const Args>(args)...);
        }
    }
};

enum class ADBackends { autodiff
#if defined(TEQP_MULTICOMPLEX_ENABLED)
    ,multicomplex
#endif
    ,complex_step
};
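
// Usage sketch (hypothetical `model` implementing alphar(T, rho, molefrac)):
//
//     AlphaCallWrapper<AlphaWrapperOption::residual, decltype(model)> wrapper(model);
//     auto val = wrapper.alpha(T, rho, molefrac);   // forwards to model.alphar(T, rho, molefrac)
//
// With AlphaWrapperOption::idealgas, the same call forwards to model.alphaig instead; the
// ADBackends enumeration above selects the differentiation backend used by TDXDerivatives below.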
template<typename Model, typename Scalar = double, typename VectorType = Eigen::ArrayXd>
struct TDXDerivatives {
    static auto get_Ar00(const Model& model, const Scalar& T, const Scalar& rho, const VectorType& molefrac) {
        return model.alphar(T, rho, molefrac);
    }

    /**
    * Calculate the derivative \f$\Lambda_{ij}\f$, where
    * \f[
    * \Lambda^{\rm r}_{ij} = (1/T)^i\rho^j\left(\frac{\partial^{i+j}(\alpha^*)}{\partial(1/T)^i\partial\rho^j}\right)
    * \f]
    * 
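    * For example, with \f$i=0, j=1\f$ this reduces to \f$\Lambda^{\rm r}_{01} = \rho\left(\frac{\partial \alpha^*}{\partial \rho}\right)_{1/T}\f$,
    * and with \f$i=1, j=0\f$ to \f$\Lambda^{\rm r}_{10} = (1/T)\left(\frac{\partial \alpha^*}{\partial (1/T)}\right)_{\rho}\f$.
    * 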
    * Note: none of the intermediate derivatives are returned, although they are calculated
    */
    template<int iT, int iD, ADBackends be = ADBackends::autodiff, class AlphaWrapper>
    static auto get_Agenxy(const AlphaWrapper& w, const Scalar& T, const Scalar& rho, const VectorType& molefrac) {

        static_assert(iT > 0 || iD > 0);
        if constexpr (iT == 0 && iD > 0) {
            if constexpr (be == ADBackends::autodiff) {
                // If a pure derivative, then we can use autodiff::Real for that variable and Scalar for other variable
                autodiff::Real<iD, Scalar> rho_ = rho;
                auto f = [&w, &T, &molefrac](const auto& rho__) { return w.alpha(T, rho__, molefrac); };
                return powi(rho, iD)*derivatives(f, along(1), at(rho_))[iD];
            }
            else if constexpr (iD == 1 && be == ADBackends::complex_step) {
                double h = 1e-100;
                auto rho_ = std::complex<Scalar>(rho, h);
                return powi(rho, iD) * w.alpha(T, rho_, molefrac).imag() / h;
            }
#if defined(TEQP_MULTICOMPLEX_ENABLED)
            else if constexpr (be == ADBackends::multicomplex) {
                using fcn_t = std::function<mcx::MultiComplex<Scalar>(const mcx::MultiComplex<Scalar>&)>;
                fcn_t f = [&](const auto& rhomcx) { return w.alpha(T, rhomcx, molefrac); };
                auto ders = diff_mcx1(f, rho, iD, true /* and_val */);
                return powi(rho, iD)*ders[iD];
            }
                throw std::invalid_argument("algorithmic differentiation backend is invalid in get_Agenxy for iT == 0");
        else if constexpr (iT > 0 && iD == 0) {
            Scalar Trecip = 1.0 / T;
            if constexpr (be == ADBackends::autodiff) {
                // If a pure derivative, then we can use autodiff::Real for that variable and Scalar for other variable
                autodiff::Real<iT, Scalar> Trecipad = Trecip;
                auto f = [&w, &rho, &molefrac](const auto& Trecip__) {return w.alpha(forceeval(1.0/Trecip__), rho, molefrac); };
                return powi(Trecip, iT)*derivatives(f, along(1), at(Trecipad))[iT];
            }
            else if constexpr (iT == 1 && be == ADBackends::complex_step) {
                double h = 1e-100;
                auto Trecipcsd = std::complex<Scalar>(Trecip, h);
                return powi(Trecip, iT)* w.alpha(1/Trecipcsd, rho, molefrac).imag()/h;
            }
#if defined(TEQP_MULTICOMPLEX_ENABLED)
            else if constexpr (be == ADBackends::multicomplex) {
                using fcn_t = std::function<mcx::MultiComplex<Scalar>(const mcx::MultiComplex<Scalar>&)>;
                fcn_t f = [&](const auto& Trecipmcx) { return w.alpha(1.0/Trecipmcx, rho, molefrac); };
                auto ders = diff_mcx1(f, Trecip, iT, true /* and_val */);
                return powi(Trecip, iT)*ders[iT];
            }
#endif
            else {
                throw std::invalid_argument("algorithmic differentiation backend is invalid in get_Agenxy for iD == 0");
            }
        }
        else { // iT > 0 and iD > 0
            if constexpr (be == ADBackends::autodiff) {
                using adtype = autodiff::HigherOrderDual<iT + iD, double>;
                adtype Trecipad = 1.0 / T, rhoad = rho;
                auto f = [&w, &molefrac](const adtype& Trecip, const adtype& rho_) {
                    adtype T_ = 1.0/Trecip;
                    return eval(w.alpha(T_, rho_, molefrac)); };
                auto wrts = std::tuple_cat(build_duplicated_tuple<iT>(std::ref(Trecipad)), build_duplicated_tuple<iD>(std::ref(rhoad)));