Remove more R__HAS_VECCORE guards
xvallspl authored and lmoneta committed Jul 4, 2017
1 parent cbc38d6 commit 325133e
Showing 2 changed files with 7 additions and 18 deletions.
math/mathcore/inc/Fit/Fitter.h (10 changes: 5 additions & 5 deletions)
@@ -83,6 +83,8 @@ class Fitter {
using IModelFunctionTempl = ROOT::Math::IParamMultiFunctionTempl<T>;
#ifdef R__HAS_VECCORE
typedef ROOT::Math::IParametricFunctionMultiDimTempl<ROOT::Double_v> IModelFunction_v;
#else
typedef ROOT::Math::IParamMultiFunction IModelFunction_v;
#endif
typedef ROOT::Math::IParamMultiGradFunction IGradModelFunction;
typedef ROOT::Math::IParamFunction IModel1DFunction;
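
The guard kept at this typedef is what lets the rest of the code drop its own guards: IModelFunction_v names the vectorised interface when VecCore is available and falls back to the scalar ROOT::Math::IParamMultiFunction otherwise, so the name is always defined. A minimal standalone sketch of the same pattern, with made-up names (HAS_VECCORE_DEMO, ScalarFunc, VectorFunc, Func_v) standing in for R__HAS_VECCORE and the ROOT interfaces:

#include <iostream>
#include <memory>

struct ScalarFunc { const char *kind() const { return "scalar"; } };
struct VectorFunc { const char *kind() const { return "vectorised"; } };

// Keep the configuration guard in one place: the typedef. HAS_VECCORE_DEMO
// stands in for R__HAS_VECCORE and would be set by the build system.
#ifdef HAS_VECCORE_DEMO
typedef VectorFunc Func_v;  // vectorised interface when the backend exists
#else
typedef ScalarFunc Func_v;  // scalar fallback, so the name is always valid
#endif

int main()
{
   // Call sites only mention Func_v and compile identically in both builds.
   std::shared_ptr<Func_v> f = std::make_shared<Func_v>();
   std::cout << f->kind() << "\n";
   return 0;
}
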
@@ -171,7 +173,7 @@ class Fitter {
SetData(data);
return DoBinnedLikelihoodFit(extended, executionPolicy);
}

bool LikelihoodFit(const std::shared_ptr<BinData> &data, bool extended = true,
ROOT::Fit::ExecutionPolicy executionPolicy = ROOT::Fit::kSerial) {
SetData(data);
@@ -490,11 +492,9 @@ class Fitter {
int fDataSize; // size of data sets (need for Fumili or LM fitters)

FitConfig fConfig; // fitter configuration (options and parameter settings)
#ifdef R__HAS_VECCORE

std::shared_ptr<IModelFunction_v> fFunc_v; //! copy of the fitted function containing on output the fit result
#else
std::shared_ptr<IModelFunction> fFunc_v; //dummy for when VecCore not available. Keeps the code cleaner.
#endif

std::shared_ptr<IModelFunction> fFunc; //! copy of the fitted function containing on output the fit result

std::shared_ptr<ROOT::Fit::FitResult> fResult; //! pointer to the object containing the result of the fit
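
With IModelFunction_v defined in both configurations, fFunc_v can be declared once as std::shared_ptr<IModelFunction_v> instead of carrying a guarded dummy declaration. A rough self-contained sketch of that member layout, using hypothetical stand-ins (DemoFitter, ScalarFunc, VectorisedFunc) rather than the ROOT classes:

#include <memory>
#include <utility>

struct ScalarFunc { };      // stands in for IModelFunction
struct VectorisedFunc { };  // stands in for IModelFunction_v

// Both members exist unconditionally; which one is filled is decided at
// run time rather than with preprocessor guards.
class DemoFitter {
public:
   void SetFunction(std::shared_ptr<ScalarFunc> f)     { fFunc = std::move(f); }
   void SetFunction(std::shared_ptr<VectorisedFunc> f) { fFunc_v = std::move(f); }
   bool HasVectorisedFunction() const { return static_cast<bool>(fFunc_v); }

private:
   std::shared_ptr<VectorisedFunc> fFunc_v;  // set only for vectorised model functions
   std::shared_ptr<ScalarFunc>     fFunc;    // set for ordinary model functions
};

int main()
{
   DemoFitter fitter;
   fitter.SetFunction(std::make_shared<ScalarFunc>());
   return fitter.HasVectorisedFunction() ? 1 : 0;  // 0 here: the scalar member was set
}
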
math/mathcore/src/Fitter.cxx (15 changes: 2 additions & 13 deletions)
@@ -339,12 +339,10 @@ bool Fitter::DoLeastSquareFit(ROOT::Fit::ExecutionPolicy executionPolicy) {
if (!fFunc_v ) {
MATH_ERROR_MSG("Fitter::DoLeastSquareFit","model function is not set");
return false;
#ifdef R__HAS_VECCORE
} else{
Chi2FCN<BaseFunc, IModelFunction_v> chi2(data, fFunc_v, executionPolicy);
fFitType = chi2.Type();
return DoMinimization (chi2);
#endif
}
} else {

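With the #ifdef/#endif pair gone from this hunk, the branch between the vectorised Chi2FCN<BaseFunc, IModelFunction_v> and the scalar Chi2FCN<BaseFunc> is decided purely at run time by which function pointer is set. A simplified, self-contained sketch of that control flow; the outer condition is not shown in the hunk, so the Demo names and the exact test on fFunc are assumptions:

#include <iostream>
#include <memory>

struct ScalarFunc { };      // stands in for IModelFunction
struct VectorisedFunc { };  // stands in for IModelFunction_v

// Simplified shape of the branch after the guards are removed: a single
// run-time check on the pointers replaces the preprocessor branch.
bool DoLeastSquareFitDemo(const std::shared_ptr<ScalarFunc> &fFunc,
                          const std::shared_ptr<VectorisedFunc> &fFunc_v)
{
   if (!fFunc) {
      if (!fFunc_v) {
         std::cerr << "model function is not set\n";
         return false;
      } else {
         std::cout << "minimising a vectorised chi-square\n";  // Chi2FCN<BaseFunc, IModelFunction_v>
         return true;
      }
   } else {
      std::cout << "minimising a scalar chi-square\n";         // Chi2FCN<BaseFunc>
      return true;
   }
}

int main()
{
   // No scalar function set, a vectorised one present: vectorised branch is taken.
   return DoLeastSquareFitDemo(nullptr, std::make_shared<VectorisedFunc>()) ? 0 : 1;
}
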
@@ -410,7 +408,6 @@ bool Fitter::DoBinnedLikelihoodFit(bool extended, ROOT::Fit::ExecutionPolicy exe

if (!fUseGradient) {
// do minimization without using the gradient
#ifdef R__HAS_VECCORE
if (fFunc_v) {
// create a chi2 function to be used for the equivalent chi-square
Chi2FCN<BaseFunc, IModelFunction_v> chi2(data, fFunc_v);
@@ -433,10 +430,7 @@ bool Fitter::DoBinnedLikelihoodFit(bool extended, ROOT::Fit::ExecutionPolicy exe
logl.UseSumOfWeightSquare();
if (!ApplyWeightCorrection(logl)) return false;
}
#endif
#ifdef R__HAS_VECCORE
}
#endif
} else {
// create a chi2 function to be used for the equivalent chi-square
Chi2FCN<BaseFunc> chi2(data, fFunc);
@@ -501,7 +495,6 @@ bool Fitter::DoUnbinnedLikelihoodFit(bool extended, ROOT::Fit::ExecutionPolicy e

if (!fUseGradient) {
// do minimization without using the gradient
#ifdef R__HAS_VECCORE
if (fFunc_v ){
LogLikelihoodFCN<BaseFunc, IModelFunction_v> logl(data, fFunc_v, useWeight, extended, executionPolicy);
fFitType = logl.Type();
@@ ... @@
if (!ApplyWeightCorrection(logl) ) return false;
}
return true;
}
else{
#endif
LogLikelihoodFCN<BaseFunc> logl(data, fFunc, useWeight, extended, executionPolicy);
} else {
LogLikelihoodFCN<BaseFunc> logl(data, fFunc, useWeight, extended, executionPolicy);

fFitType = logl.Type();
if (!DoMinimization (logl) ) return false;
@@ ... @@
if (!ApplyWeightCorrection(logl) ) return false;
}
return true;
#ifdef R__HAS_VECCORE
}
#endif
} else {
// use gradient : check if fFunc provides gradient
if (fConfig.MinimizerOptions().PrintLevel() > 0)

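After the guards are removed, the vectorised and scalar likelihood branches share the same tail: minimise the objective and, for weighted fits, call UseSumOfWeightSquare followed by ApplyWeightCorrection. A compact standalone sketch of that sequence, with DemoLogL and the *Demo helpers as hypothetical stand-ins for LogLikelihoodFCN, DoMinimization and ApplyWeightCorrection:

#include <iostream>

// Minimal stand-in for LogLikelihoodFCN; only the call used above is modelled.
struct DemoLogL {
   void UseSumOfWeightSquare() { std::cout << "switching to squared weights\n"; }
};

bool DoMinimizationDemo(DemoLogL &)        { std::cout << "minimising\n"; return true; }
bool ApplyWeightCorrectionDemo(DemoLogL &) { std::cout << "applying weight correction\n"; return true; }

// The common tail shared by the vectorised and the scalar likelihood branch.
bool LikelihoodBranchDemo(bool useWeight)
{
   DemoLogL logl;
   if (!DoMinimizationDemo(logl)) return false;
   if (useWeight) {
      logl.UseSumOfWeightSquare();
      if (!ApplyWeightCorrectionDemo(logl)) return false;
   }
   return true;
}

int main() { return LikelihoodBranchDemo(true) ? 0 : 1; }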