Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
25 commits
Select commit Hold shift + click to select a range
4a03bf9
First version of regularization
davidwalter2 Feb 3, 2026
bf4221e
First implementation of curvature scan for regularization
davidwalter2 Feb 5, 2026
fd228b3
Add 'earlyStopping' feature to stop minimization if no reduction afte…
davidwalter2 Feb 5, 2026
a1a6f4d
Implement curvature scan and support for plotting it
davidwalter2 Feb 5, 2026
93f069d
Add flag to ensure numerical reproducibility
davidwalter2 Feb 5, 2026
1ecfbfa
Fix early stopping functionality
davidwalter2 Feb 5, 2026
fdc931d
work on lcurve optimization
davidwalter2 Feb 5, 2026
11fecdb
Add regularization test in CI; improve 'epoch' plotting script to be …
davidwalter2 Feb 6, 2026
bfa6c00
Few smaller fixes
davidwalter2 Feb 10, 2026
2995dee
Remove kaleido
davidwalter2 Feb 16, 2026
a000171
Fix betavariations in case of flow bins
davidwalter2 Feb 16, 2026
6e80a90
Merge branch 'main' of github.com:WMass/rabbit into 260202_regulariza…
davidwalter2 Feb 16, 2026
b32e538
Run new version of black
davidwalter2 Feb 16, 2026
5041628
Merge branch 'main' of github.com:WMass/rabbit into 260202_regulariza…
davidwalter2 Feb 16, 2026
01ebf6a
Merge branch 'main' of github.com:WMass/rabbit into 260202_regulariza…
davidwalter2 Mar 2, 2026
c801750
Update parser descriptions
davidwalter2 Mar 7, 2026
c532558
Merge branch 'main' of github.com:WMass/rabbit into 260202_regulariza…
davidwalter2 Mar 14, 2026
67c26f3
Putting safeguards for BB full
davidwalter2 Mar 15, 2026
52a8f42
Putting safeguards for BB full
davidwalter2 Mar 15, 2026
793f676
Add new tests
davidwalter2 Mar 16, 2026
04c3e88
Merge branch 'main' of github.com:WMass/rabbit into 260202_regulariza…
davidwalter2 Mar 17, 2026
4f2436c
Merge branch '260202_regularization' of github.com:davidwalter2/rabbi…
davidwalter2 Mar 17, 2026
800311e
Safeguard also for newton step
davidwalter2 Mar 17, 2026
a8a32a0
Merge branch '260315_fixBB' of github.com:davidwalter2/rabbit into 26…
davidwalter2 Mar 17, 2026
441f026
Add warning for BB full with gamma
davidwalter2 Mar 17, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
64 changes: 57 additions & 7 deletions .github/workflows/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -236,18 +236,35 @@ jobs:
rabbit_fit.py $RABBIT_OUTDIR/test_tensor.hdf5 -o $RABBIT_OUTDIR/
-t 0 --postfix blinded --setConstraintMinimum bkg_2_norm 0.5 --doImpacts --globalImpacts

- name: pseudodata fit
run: >-
rabbit_fit.py $RABBIT_OUTDIR/test_tensor.hdf5 -o $RABBIT_OUTDIR/ --postfix pseudodata
-t 0 --pseudoData original --doImpacts --globalImpacts --unblind ^sig$


alternative-fits:
runs-on: [self-hosted, linux, x64]
needs: [setenv, make-tensor]

steps:
- env:
RABBIT_OUTDIR: ${{ needs.setenv.outputs.RABBIT_OUTDIR }}
PYTHONPATH: ${{ needs.setenv.outputs.PYTHONPATH }}
PATH: ${{ needs.setenv.outputs.PATH }}
run: |
echo "RABBIT_OUTDIR=${RABBIT_OUTDIR}" >> $GITHUB_ENV
echo "PYTHONPATH=${PYTHONPATH}" >> $GITHUB_ENV
echo "PATH=${PATH}" >> $GITHUB_ENV

- uses: actions/checkout@v4

- name: sparse tensor fit
run: >-
rabbit_fit.py $RABBIT_OUTDIR/test_tensor_sparse.hdf5 -o $RABBIT_OUTDIR/ --postfix sparse
-t 0 --noBinByBinStat --doImpacts --globalImpacts --computeVariations
--saveHists --saveHistsPerProcess --computeHistErrors --computeHistErrorsPerProcess
-m Project ch1 a -m Project ch1 b

- name: pseudodata fit
run: >-
rabbit_fit.py $RABBIT_OUTDIR/test_tensor.hdf5 -o $RABBIT_OUTDIR/ --postfix pseudodata
-t 0 --pseudoData original --doImpacts --globalImpacts --unblind ^sig$

- name: covariance fit
run: >-
rabbit_fit.py $RABBIT_OUTDIR/test_tensor.hdf5 -o $RABBIT_OUTDIR/ --postfix covariance
Expand All @@ -262,11 +279,44 @@ jobs:
--saveHists --saveHistsPerProcess --computeHistErrors --computeHistErrorsPerProcess
--computeHistImpacts --computeHistCov -m Project ch1 a -m Project ch1 b

- name: chi2 fit with Barlow--Beeston full
- name: Barlow--Beeston full (gamma)
run: >-
rabbit_fit.py $RABBIT_OUTDIR/test_tensor_symmetric.hdf5 -o $RABBIT_OUTDIR/ --postfix nonlinear
-t 0 --doImpacts --globalImpacts --globalImpactsDisableJVP --binByBinStatType gamma --binByBinStatMode full
--saveHists --saveHistsPerProcess --computeHistErrors --computeHistErrorsPerProcess
--computeHistImpacts --computeHistCov -m Project ch1 a -m Project ch1 b

- name: chi2 fit with Barlow--Beeston full (normal-multiplicative)
run: >-
rabbit_fit.py $RABBIT_OUTDIR/test_tensor_symmetric.hdf5 -o $RABBIT_OUTDIR/ --postfix bb_full
-t 0 --globalImpacts --binByBinStatType normal-multiplicative --binByBinStatMode full

regularization:
runs-on: [self-hosted, linux, x64]
needs: [setenv, make-tensor]
steps:
- env:
RABBIT_OUTDIR: ${{ needs.setenv.outputs.RABBIT_OUTDIR }}
PYTHONPATH: ${{ needs.setenv.outputs.PYTHONPATH }}
PATH: ${{ needs.setenv.outputs.PATH }}
WEB_DIR: ${{ needs.setenv.outputs.WEB_DIR }}
PLOT_DIR: ${{ needs.setenv.outputs.PLOT_DIR }}
run: |
echo "RABBIT_OUTDIR=${RABBIT_OUTDIR}" >> $GITHUB_ENV
echo "PYTHONPATH=${PYTHONPATH}" >> $GITHUB_ENV
echo "PATH=${PATH}" >> $GITHUB_ENV
echo "WEB_DIR=${WEB_DIR}" >> $GITHUB_ENV
echo "PLOT_DIR=${PLOT_DIR}" >> $GITHUB_ENV

- name: lcurve scan
run: >-
rabbit_fit.py $RABBIT_OUTDIR/test_tensor.hdf5 -o $RABBIT_OUTDIR/ --postfix lcurve_scan
-t 0 --unblind -r SVD Select 'ch0_masked' -r SVD Select 'ch0_masked' --earlyStopping 15

- name: plot lcurve
run: >-
python tests/plot_epoch_loss_time.py results/fitresults_lcurve_scan.hdf5 -o $WEB_DIR/$PLOT_DIR --types lcurve --title Experiment --subtitle 'Work in progress'

bsm:
runs-on: [self-hosted, linux, x64]
needs: [setenv, make-tensor]
Expand Down Expand Up @@ -431,7 +481,7 @@ jobs:

copy-clean:
runs-on: [self-hosted, linux, x64]
needs: [setenv, symmerizations, bsm, plotting, likelihoodscans]
needs: [setenv, symmerizations, alternative-fits, bsm, regularization, plotting, likelihoodscans]
if: always()
steps:
- env:
Expand Down
58 changes: 53 additions & 5 deletions bin/rabbit_fit.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,8 @@
from rabbit.mappings import helpers as mh
from rabbit.mappings import mapping as mp
from rabbit.poi_models import helpers as ph
from rabbit.regularization import helpers as rh
from rabbit.regularization.lcurve import l_curve_optimize_tau, l_curve_scan_tau
from rabbit.tfhelpers import edmval_cov

from wums import output_tools, logging # isort: skip
Expand All @@ -21,7 +23,7 @@


def make_parser():
parser = parsing.common_parser()
parser = parsing.common_parser("Perform binned profile maximum likelihood fits")
parser.add_argument("--outname", default="fitresults.hdf5", help="output file name")
parser.add_argument(
"--fullNll",
Expand Down Expand Up @@ -149,6 +151,12 @@ def make_parser():
type=str,
help="Specify result from external postfit file",
)
parser.add_argument(
"--noFit",
default=False,
action="store_true",
help="Do not perform the minimization.",
)
parser.add_argument(
"--noPostfitProfileBB",
default=False,
Expand Down Expand Up @@ -182,7 +190,24 @@ def make_parser():
action="store_true",
help="compute impacts of frozen (non-profiled) systematics",
)

parser.add_argument(
"--lCurveScan",
default=False,
action="store_true",
help="For use with regularization, scan the L curve versus values for tau",
)
parser.add_argument(
"--lCurveOptimize",
default=False,
action="store_true",
help="For use with regularization, find the value of tau that maximizes the curvature",
)
parser.add_argument(
"--regularizationStrength",
default=0.0,
type=float,
help="For use with regularization, set the regularization strength (tau)",
)
return parser.parse_args()


Expand Down Expand Up @@ -281,7 +306,21 @@ def fit(args, fitter, ws, dofit=True):
edmval = None

if args.externalPostfit is not None:
fitter.load_fitresult(args.externalPostfit, args.externalPostfitResult)
fitter.load_fitresult(
args.externalPostfit,
args.externalPostfitResult,
profile=not args.noPostfitProfileBB,
)

if args.lCurveScan:
tau_values, l_curve_values = l_curve_scan_tau(fitter)
ws.add_1D_integer_hist(tau_values, "step", "tau")
ws.add_1D_integer_hist(l_curve_values, "step", "lcurve")

if args.lCurveOptimize:
best_tau, max_curvature = l_curve_optimize_tau(fitter)
ws.add_1D_integer_hist([best_tau], "best", "tau")
ws.add_1D_integer_hist([max_curvature], "best", "lcurve")

if dofit:
cb = fitter.minimize()
Expand All @@ -293,7 +332,8 @@ def fit(args, fitter, ws, dofit=True):
fitter._profile_beta()

if cb is not None:
ws.add_loss_time_hist(cb.loss_history, cb.time_history)
ws.add_1D_integer_hist(cb.loss_history, "epoch", "loss")
ws.add_1D_integer_hist(cb.time_history, "epoch", "time")

if not args.noHessian:
# compute the covariance matrix and estimated distance to minimum
Expand Down Expand Up @@ -477,6 +517,14 @@ def main():
mp.CompositeMapping(mappings),
]

ifitter.tau.assign(args.regularizationStrength)
regularizers = []
for margs in args.regularization:
mapping = mh.load_mapping(margs[1], indata, *margs[2:])
regularizer = rh.load_regularizer(margs[0], mapping, dtype=indata.dtype)
regularizers.append(regularizer)
ifitter.regularizers = regularizers

np.random.seed(args.seed)
tf.random.set_seed(args.seed)

Expand Down Expand Up @@ -560,7 +608,7 @@ def main():

if not args.prefitOnly:
ifitter.set_blinding_offsets(blind=blinded_fits[i])
fit(args, ifitter, ws, dofit=ifit >= 0)
fit(args, ifitter, ws, dofit=ifit >= 0 and not args.noFit)
fit_time.append(time.time())

if args.saveHists:
Expand Down
Loading
Loading