// Glen Cowan
// RHUL Physics
// 25 June, 2011
// This is a simple RooStats macro to illustrate
// the problem of observing n events assumed to be
// Poisson distributed with a mean s + b, where
// s, the parameter of interest, is the expected number
// of signal events and b is the expected number
// of background events.  In this version of the
// example, we assume b is known.
//
// The goals of the exercise are:
//
// 1) For a given observed n and known b, find the discovery
//    significance (p-value of the background-only hypothesis).
//
// 2) For a given n and known b, find the upper limit on s.

#include <iostream>
#include "TStopwatch.h"
#include "TCanvas.h"
#include "TROOT.h"
#include "TMath.h"
#include "RooPlot.h"
#include "RooAbsPdf.h"
#include "RooRealVar.h"
#include "RooWorkspace.h"
#include "RooDataSet.h"
#include "RooGlobalFunc.h"
#include "RooFitResult.h"
#include "RooRandom.h"
#include "RooPoisson.h"
#include "RooStats/ModelConfig.h"
#include "RooStats/ProfileLikelihoodCalculator.h"
#include "RooStats/LikelihoodInterval.h"
#include "RooStats/LikelihoodIntervalPlot.h"
#include "RooStats/BayesianCalculator.h"
#include "RooStats/MCMCCalculator.h"
#include "RooStats/MCMCInterval.h"
#include "RooStats/MCMCIntervalPlot.h"
#include "RooStats/ProposalHelper.h"
#include "RooStats/SimpleInterval.h"
#include "RooStats/FeldmanCousins.h"
#include "RooStats/PointSetInterval.h"
#include "RooStats/HypoTestResult.h"
#include "RooStats/ToyMCSampler.h"
#include "RooStats/ProfileLikelihoodTestStat.h"

using namespace std;
using namespace RooFit;
using namespace RooStats;

void SimpleCount2(){

  double nObs = 35;
  double bVal = 25;
  double CL   = 0.95;
  bool oneSided = true;

  // put model into workspace
  RooWorkspace* wspace = new RooWorkspace("wspace");

  // make model
  RooRealVar* n = (RooRealVar*) wspace->factory("n[0., 1000.]");
  RooRealVar* s = (RooRealVar*) wspace->factory("s[0., 40.]");
  RooRealVar* b = (RooRealVar*) wspace->factory("b[0., 300.]");
  wspace->factory("sum::splusb(s,b)");
  RooAbsPdf* model = (RooAbsPdf*) wspace->factory("Poisson::model(n,splusb)");
  b->setVal(bVal);
  b->setConstant();
  wspace->defineSet("obs", "n");
  wspace->defineSet("parOfInterest", "s");
  wspace->defineSet("nuisPar", "b");

  // First create empty data set, set data value and put into workspace
  RooDataSet* data = new RooDataSet("data", "data", RooArgSet(*n));   // empty
  n->setVal(nObs);                               // i.e. nObs events found in data
  data->add(*n);
  wspace->import(*data);
  wspace->Print();

  // Make ModelConfig object and put in workspace
  ModelConfig* modelConfig = new ModelConfig("SimpleCount");
  modelConfig->SetWorkspace(*wspace);
  modelConfig->SetPdf(*wspace->pdf("model"));
  modelConfig->SetParametersOfInterest(*wspace->set("parOfInterest"));
  modelConfig->SetNuisanceParameters(*wspace->set("nuisPar"));
  wspace->import(*modelConfig);
  wspace->writeToFile("SimpleCount2.root");

  // Test hypothesis of s = 0, here using Profile Likelihood.
  // This tool assumes chi-square asymptotics for a 2-sided interval
  // and does not correct for any boundary effects.
  ProfileLikelihoodCalculator plc(*data, *modelConfig);
  RooArgSet* nullParams = (RooArgSet*) (*wspace->set("parOfInterest")).snapshot();
  nullParams->setRealValue("s", 0);
  plc.SetNullParameters(*nullParams);
  HypoTestResult* htr = plc.GetHypoTest();
  double pValue = htr->NullPValue();
  double significance = TMath::NormQuantile(1. - pValue);
  cout << "p-value of s = 0 hypothesis = " << pValue << endl;
  cout << "significance Z              = " << significance << endl;
  // (an exact Poisson cross-check is sketched after this macro)

  // Find upper limit on s
  plc.SetConfidenceLevel(CL);
  LikelihoodInterval* plInt = plc.GetInterval();
  LikelihoodIntervalPlot* intPlot = new LikelihoodIntervalPlot(plInt);
  double sLo_pl = plInt->LowerLimit(*s);
  double sUp_pl = plInt->UpperLimit(*s);
  cout << "sLo_pl = " << sLo_pl << endl;
  cout << "sUp_pl = " << sUp_pl << endl;
  intPlot->Draw();

  // Use Feldman-Cousins
  FeldmanCousins fc(*data, *modelConfig);
  fc.SetConfidenceLevel(CL);
  // number counting: dataset always has 1 entry with N events observed
  fc.FluctuateNumDataEntries(false);
  fc.UseAdaptiveSampling(true);

  if (oneSided) {
    /////////////////////////////////////////////
    // Feldman-Cousins is a unified limit by definition,
    // but the tool takes care of a few things for us, like which values
    // of the nuisance parameters should be used to generate toys.
    // So let's just change the test statistic and realize this is
    // no longer "Feldman-Cousins" but a fully frequentist Neyman construction.
    // This will lead to a tighter upper limit than the Feldman-Cousins approach.
    ToyMCSampler* toymcsampler = (ToyMCSampler*) fc.GetTestStatSampler();
    ProfileLikelihoodTestStat* testStat =
      dynamic_cast<ProfileLikelihoodTestStat*>(toymcsampler->GetTestStatistic());
    testStat->SetOneSided(true);
  }
  fc.SetNBins(100);
  PointSetInterval* fcInt = fc.GetInterval();
  double sLo_fc = fcInt->LowerLimit(*s);
  double sUp_fc = fcInt->UpperLimit(*s);
  cout << "---------------------------------------" << endl;
  cout << "sLo_fc = " << sLo_fc << endl;
  cout << "sUp_fc = " << sUp_fc << endl;

}
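
// ---------------------------------------------------------------------------
// Optional cross-check (a sketch, not part of the original macro): the
// ProfileLikelihoodCalculator significance above relies on chi-square
// asymptotics, whereas for a single Poisson count with known b the p-value of
// the background-only hypothesis can be computed exactly as
// P(n >= nObs | b) = TMath::Gamma(nObs, b).  The function name and its
// default arguments (chosen to match nObs = 35, b = 25 above) are
// illustrative, not part of the original exercise.
void ExactPoissonSignificance(double nObs = 35., double b = 25.)
{
  double pExact = TMath::Gamma(nObs, b);             // P(n >= nObs | b), exact Poisson tail
  double zExact = TMath::NormQuantile(1. - pExact);  // equivalent Gaussian significance
  cout << "exact p-value      = " << pExact << endl;
  cout << "exact significance = " << zExact << endl;
}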
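
// ---------------------------------------------------------------------------
// Sketch only: the Bayesian headers are included above but not used in the
// macro as given.  Assuming the workspace file "SimpleCount2.root" written by
// SimpleCount2() and a flat prior on s, a Bayesian upper limit could be
// obtained roughly as follows.  The function name, the prior name "prior" and
// the variable sUp_bc are illustrative choices, not from the original macro.
#include "TFile.h"   // needed only for this sketch

void BayesianUpperLimit(double CL = 0.95)
{
  TFile* f = TFile::Open("SimpleCount2.root");
  RooWorkspace* w = (RooWorkspace*) f->Get("wspace");
  ModelConfig* mc = (ModelConfig*) w->obj("SimpleCount");
  RooAbsData* data = w->data("data");

  mc->SetObservables(*w->set("obs"));       // make sure the observable is registered
  w->factory("Uniform::prior(s)");          // flat prior for the signal parameter s
  mc->SetPriorPdf(*w->pdf("prior"));

  BayesianCalculator bc(*data, *mc);
  bc.SetConfidenceLevel(CL);
  bc.SetLeftSideTailFraction(0.);           // one-sided credible interval -> upper limit
  SimpleInterval* bInt = bc.GetInterval();
  double sUp_bc = bInt->UpperLimit();
  cout << "sUp_bc = " << sUp_bc << endl;
}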