From 0b12df45bb004b8dce12973c89810f55107a907c Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 7 Oct 2024 17:31:00 +0000
Subject: [PATCH 1/2] chore: update pre-commit hooks
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

updates:
- [github.com/psf/black-pre-commit-mirror: 24.4.2 → 24.8.0](https://github.com/psf/black-pre-commit-mirror/compare/24.4.2...24.8.0)
- [github.com/pre-commit/pre-commit-hooks: v4.6.0 → v5.0.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.6.0...v5.0.0)
- [github.com/astral-sh/ruff-pre-commit: v0.5.0 → v0.6.9](https://github.com/astral-sh/ruff-pre-commit/compare/v0.5.0...v0.6.9)
- [github.com/abravalheri/validate-pyproject: v0.18 → v0.20.2](https://github.com/abravalheri/validate-pyproject/compare/v0.18...v0.20.2)
- [github.com/python-jsonschema/check-jsonschema: 0.28.6 → 0.29.3](https://github.com/python-jsonschema/check-jsonschema/compare/0.28.6...0.29.3)
---
 .pre-commit-config.yaml | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index dea80fd6..32a2e4f9 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -5,7 +5,7 @@ ci:

 repos:
   - repo: https://github.com/psf/black-pre-commit-mirror
-    rev: "24.4.2"
+    rev: "24.8.0"
     hooks:
       - id: black-jupyter
         args: [--line-length=100]
@@ -17,7 +17,7 @@ repos:
         additional_dependencies: [black==23.*]

   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: "v4.6.0"
+    rev: "v5.0.0"
     hooks:
       - id: check-added-large-files
         args: ["--maxkb=2000"]
@@ -48,7 +48,7 @@ repos:
   #       args: [--prose-wrap=always]

   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: "v0.5.0"
+    rev: "v0.6.9"
     hooks:
       - id: ruff
         args: ["--fix", "--show-fixes"]
@@ -84,13 +84,13 @@ repos:
   #       exclude: .pre-commit-config.yaml

   - repo: https://github.com/abravalheri/validate-pyproject
-    rev: "v0.18"
+    rev: "v0.20.2"
     hooks:
       - id: validate-pyproject
         additional_dependencies: ["validate-pyproject-schema-store[all]"]

   - repo: https://github.com/python-jsonschema/check-jsonschema
-    rev: "0.28.6"
+    rev: "0.29.3"
     hooks:
       - id: check-dependabot
       - id: check-github-workflows

From 3786868feae71d89913e61faf788e351b6ca858a Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 7 Oct 2024 17:31:49 +0000
Subject: [PATCH 2/2] style: pre-commit fixes

---
 data/xhy/check_pfnanos.ipynb | 5 +-
 data/xhy/filter_samples.ipynb | 9 ++-
 data/xhy/parse_das.ipynb | 4 +-
 inference_scans/run_law.sh | 2 +-
 paper/latex_tables.ipynb | 25 +++----
 paper/limit_plots.ipynb | 5 +-
 .../VBF_binder/VBFKinematicsStudy.ipynb | 21 ++----
 .../VBF_binder/VBFKinematicsStudyRK.ipynb | 21 ++----
 src/HHbbVV/VBF_binder/VBFVectorTesting.ipynb | 16 +++--
 src/HHbbVV/VBF_binder/VBFgenInfoTests.ipynb | 38 ++++++-----
 src/HHbbVV/VBF_binder/VBFgenselection.ipynb | 23 +++----
 .../VBFjetsVisualizePrototype.ipynb | 8 ++-
 src/HHbbVV/VBF_binder/plot1dhistscuts.ipynb | 20 +++---
 src/HHbbVV/combine/binder/BiasTest.ipynb | 14 ++--
 src/HHbbVV/combine/binder/FTest.ipynb | 16 +++--
 src/HHbbVV/combine/binder/GoF.ipynb | 11 ++--
 .../combine/binder/InterpolateLimits.ipynb | 17 +++--
 src/HHbbVV/combine/binder/PlotScan.ipynb | 15 ++---
 src/HHbbVV/corrections/TriggerSFs.ipynb | 15 +++--
 src/HHbbVV/corrections/checkJMR.ipynb | 4 +-
 src/HHbbVV/corrections/corrections.ipynb | 5 +-
 src/HHbbVV/postprocessing/BDT_LPSFs.ipynb | 26 ++++----
.../postprocessing/CombineTemplates.ipynb | 23 ++++--- .../postprocessing/GetEventDisplayJets.ipynb | 7 +- .../postprocessing/InferenceAnalysis.ipynb | 25 ++++--- .../postprocessing/InterpolateSignal.ipynb | 25 +++---- src/HHbbVV/postprocessing/NormTest.ipynb | 21 +++--- src/HHbbVV/postprocessing/PlotFits.ipynb | 26 ++++---- src/HHbbVV/postprocessing/PlotFitsFitd.ipynb | 26 ++++---- src/HHbbVV/postprocessing/PlotFitsRes.ipynb | 17 +++-- src/HHbbVV/postprocessing/PostProcess.ipynb | 50 ++++++-------- .../postprocessing/PostProcessRes.ipynb | 53 ++++++--------- .../postprocessing/PostProcessResOld.ipynb | 51 +++++++------- .../postprocessing/PostProcessVBF.ipynb | 41 ++++-------- .../PostProcessVBFtesting.ipynb | 66 +++++++------------ src/HHbbVV/postprocessing/TTbarCheck.ipynb | 46 +++++-------- src/HHbbVV/postprocessing/TopAnalysis.ipynb | 19 +++--- .../postprocessing/TopAnalysisOld.ipynb | 19 +++--- src/HHbbVV/postprocessing/TriggerSFs.ipynb | 19 +++--- src/HHbbVV/postprocessing/VV_analysis.ipynb | 16 ++--- src/HHbbVV/postprocessing/check_outputs.ipynb | 7 +- src/HHbbVV/processors/GenSelection.ipynb | 23 ++----- .../processors/SemiResolvedVetoGenStudy.ipynb | 17 +---- src/HHbbVV/processors/debug_scores_rk.ipynb | 8 ++- .../resonant_studies/QuarkFractions.ipynb | 10 +-- src/HHbbVV/scale_factors/VV_reweighting.ipynb | 38 ++++------- src/HHbbVV/scale_factors/check_cuts.ipynb | 8 ++- src/HHbbVV/scale_factors/gen_clustering.ipynb | 24 +++---- .../scale_factors/top_reweighting.ipynb | 39 ++++------- src/HHbbVV/tagger/high_mass_check.ipynb | 8 +-- src/HHbbVV/triton/export_and_check.ipynb | 15 ++--- src/binder/VBFPlots.ipynb | 28 +++----- .../calcTriggerSFUncertainties-Jul21.ipynb | 4 +- src/binder/calcTriggerSFUncertainties.ipynb | 3 + ...lotJetHTTriggerEfficiencies_1D_Jul15.ipynb | 7 +- src/binder/plotTF.ipynb | 6 +- src/binder/plotTrigEffs.ipynb | 10 +-- src/runCoffeaCasa.ipynb | 29 ++++---- src/runJetHTTriggerEfficiencies.ipynb | 20 +++--- 59 files changed, 532 insertions(+), 642 deletions(-) diff --git a/data/xhy/check_pfnanos.ipynb b/data/xhy/check_pfnanos.ipynb index d3e5a034..198a9992 100644 --- a/data/xhy/check_pfnanos.ipynb +++ b/data/xhy/check_pfnanos.ipynb @@ -6,7 +6,10 @@ "metadata": {}, "outputs": [], "source": [ + "from __future__ import annotations\n", + "\n", "import os\n", + "\n", "import yaml" ] }, @@ -37,7 +40,7 @@ " for sample in os.listdir(f\"/eos/uscms/store/group/lpcpfnano/{user}/v2_3/{year}/XHY\")\n", " ]\n", "\n", - " with open(f\"parsed_miniaod_{year}.yaml\", \"r\") as f:\n", + " with open(f\"parsed_miniaod_{year}.yaml\") as f:\n", " mini_samples = list(yaml.safe_load(f)[\"samples\"][\"XHY\"][\"datasets\"].keys())\n", "\n", " for sample in mini_samples:\n", diff --git a/data/xhy/filter_samples.ipynb b/data/xhy/filter_samples.ipynb index 1cedc4bc..12b54188 100644 --- a/data/xhy/filter_samples.ipynb +++ b/data/xhy/filter_samples.ipynb @@ -6,8 +6,11 @@ "metadata": {}, "outputs": [], "source": [ - "import yaml\n", - "import os" + "from __future__ import annotations\n", + "\n", + "import os\n", + "\n", + "import yaml" ] }, { @@ -16,7 +19,7 @@ "metadata": {}, "outputs": [], "source": [ - "with open(\"parsed_miniaod_2017.yaml\", \"r\") as f:\n", + "with open(\"parsed_miniaod_2017.yaml\") as f:\n", " files = yaml.safe_load(f)" ] }, diff --git a/data/xhy/parse_das.ipynb b/data/xhy/parse_das.ipynb index f23d7626..702d2580 100644 --- a/data/xhy/parse_das.ipynb +++ b/data/xhy/parse_das.ipynb @@ -6,7 +6,9 @@ "metadata": {}, "outputs": [], "source": [ - "with open(\"./miniaod.txt\", 
\"r\") as f:\n", + "from __future__ import annotations\n", + "\n", + "with open(\"./miniaod.txt\") as f:\n", " samples = f.readlines()" ] }, diff --git a/inference_scans/run_law.sh b/inference_scans/run_law.sh index 86aab129..d348a397 100755 --- a/inference_scans/run_law.sh +++ b/inference_scans/run_law.sh @@ -218,4 +218,4 @@ if [ $impacts = 1 ]; then --page -1 \ --pull-range 3 \ --Snapshot-custom-args="$custom_args" -fi \ No newline at end of file +fi diff --git a/paper/latex_tables.ipynb b/paper/latex_tables.ipynb index 875c2b14..d4e3d0ab 100644 --- a/paper/latex_tables.ipynb +++ b/paper/latex_tables.ipynb @@ -6,12 +6,15 @@ "metadata": {}, "outputs": [], "source": [ - "import yaml\n", + "from __future__ import annotations\n", + "\n", "import json\n", + "from copy import deepcopy\n", + "from pathlib import Path\n", + "\n", "import numpy as np\n", "import pandas as pd\n", - "from copy import deepcopy\n", - "from pathlib import Path" + "import yaml" ] }, { @@ -45,7 +48,7 @@ "metadata": {}, "outputs": [], "source": [ - "with open(\"samples/JetHT.yaml\", \"r\") as f:\n", + "with open(\"samples/JetHT.yaml\") as f:\n", " samples = yaml.safe_load(f)[\"samples\"]\n", "\n", "lines = []\n", @@ -77,7 +80,7 @@ "metadata": {}, "outputs": [], "source": [ - "with open(\"samples/SingleMuon.yaml\", \"r\") as f:\n", + "with open(\"samples/SingleMuon.yaml\") as f:\n", " samples = yaml.safe_load(f)[\"samples\"]\n", "\n", "lines = []\n", @@ -164,10 +167,10 @@ "metadata": {}, "outputs": [], "source": [ - "with open(\"samples/MC_bg.yaml\", \"r\") as f:\n", + "with open(\"samples/MC_bg.yaml\") as f:\n", " samples = yaml.safe_load(f)[\"samples\"]\n", "\n", - "with open(\"../data/xsecs.json\", \"r\") as f:\n", + "with open(\"../data/xsecs.json\") as f:\n", " xsecs = json.load(f)\n", "\n", "lines = []\n", @@ -179,7 +182,7 @@ "\n", " for key, dataset in samples[sample][\"datasets\"].items():\n", " dname = dataset.split(\"/\")[1].replace(\"_\", \"\\_\")\n", - " citation = \"\" if not key in refs else rf\"~\\cite{{{refs[key]}}}\"\n", + " citation = \"\" if key not in refs else rf\"~\\cite{{{refs[key]}}}\"\n", " lines.append(rf\" & {dname} & {process_xsec(xsecs[key])}{citation} \\\\\" + \"\\n\")\n", "\n", " lend = len(samples[sample][\"datasets\"])\n", @@ -200,10 +203,10 @@ "metadata": {}, "outputs": [], "source": [ - "with open(\"samples/MC_sig.yaml\", \"r\") as f:\n", + "with open(\"samples/MC_sig.yaml\") as f:\n", " samples = yaml.safe_load(f)[\"samples\"]\n", "\n", - "with open(\"../data/xsecs.json\", \"r\") as f:\n", + "with open(\"../data/xsecs.json\") as f:\n", " xsecs = json.load(f)\n", "\n", "lines = []\n", @@ -247,7 +250,7 @@ " return (mX, mY)\n", "\n", "\n", - "with open(\"../data/xhy/parsed_miniaod_2017.yaml\", \"r\") as f:\n", + "with open(\"../data/xhy/parsed_miniaod_2017.yaml\") as f:\n", " samples = yaml.safe_load(f)[\"samples\"][\"XHY\"][\"datasets\"]\n", "\n", "mps = [[*mxmy(sample)] for sample in samples]\n", diff --git a/paper/limit_plots.ipynb b/paper/limit_plots.ipynb index b2961506..bee52493 100644 --- a/paper/limit_plots.ipynb +++ b/paper/limit_plots.ipynb @@ -6,9 +6,10 @@ "metadata": {}, "outputs": [], "source": [ - "import matplotlib as mpl\n", - "import matplotlib.pyplot as plt\n", + "from __future__ import annotations\n", + "\n", "import matplotlib.patches as patches\n", + "import matplotlib.pyplot as plt\n", "import matplotlib.ticker as mticker\n", "import mplhep as hep\n", "import numpy as np\n", diff --git a/src/HHbbVV/VBF_binder/VBFKinematicsStudy.ipynb 
b/src/HHbbVV/VBF_binder/VBFKinematicsStudy.ipynb index 7ae8b0d5..762516fd 100644 --- a/src/HHbbVV/VBF_binder/VBFKinematicsStudy.ipynb +++ b/src/HHbbVV/VBF_binder/VBFKinematicsStudy.ipynb @@ -7,22 +7,15 @@ "metadata": {}, "outputs": [], "source": [ - "import pandas as pd\n", + "from __future__ import annotations\n", + "\n", + "import awkward as ak\n", "import matplotlib.pyplot as plt\n", - "import mplhep as hep\n", "import matplotlib.ticker as mticker\n", + "import mplhep as hep\n", "import numpy as np\n", - "\n", - "import uproot\n", - "import awkward as ak\n", "from coffea import nanoevents\n", "\n", - "from coffea.nanoevents.methods.base import NanoEventsArray\n", - "from coffea.analysis_tools import Weights, PackedSelection\n", - "from coffea.nanoevents.methods import nanoaod\n", - "from coffea.nanoevents.methods import vector\n", - "from coffea.lookup_tools.dense_lookup import dense_lookup\n", - "\n", "from HHbbVV.processors.utils import pad_val\n", "\n", "plt.style.use(hep.style.CMS)\n", @@ -77,7 +70,7 @@ " (abs(events.GenPart.pdgId) == HIGGS_PDGID) * events.GenPart.hasFlags(GEN_FLAGS)\n", "]\n", "\n", - "vs = events.GenPart[((abs(events.GenPart.pdgId) == 24)) * events.GenPart.hasFlags(GEN_FLAGS)]" + "vs = events.GenPart[(abs(events.GenPart.pdgId) == 24) * events.GenPart.hasFlags(GEN_FLAGS)]" ] }, { @@ -95,7 +88,7 @@ "metadata": {}, "outputs": [], "source": [ - "ak4_jet_selection = { # noqa: RUF012\n", + "ak4_jet_selection = {\n", " \"pt\": 25,\n", " \"eta\": 2.7,\n", " \"jetId\": \"tight\",\n", @@ -105,7 +98,7 @@ "}\n", "\n", "# ak8 jet preselection\n", - "preselection = { # noqa: RUF012\n", + "preselection = {\n", " \"pt\": 300.0,\n", " \"eta\": 2.4,\n", " \"VVmsd\": 50,\n", diff --git a/src/HHbbVV/VBF_binder/VBFKinematicsStudyRK.ipynb b/src/HHbbVV/VBF_binder/VBFKinematicsStudyRK.ipynb index 5d8379c3..7b1cf237 100644 --- a/src/HHbbVV/VBF_binder/VBFKinematicsStudyRK.ipynb +++ b/src/HHbbVV/VBF_binder/VBFKinematicsStudyRK.ipynb @@ -17,21 +17,14 @@ "metadata": {}, "outputs": [], "source": [ - "import pandas as pd\n", + "from __future__ import annotations\n", + "\n", + "import awkward as ak\n", "import matplotlib.pyplot as plt\n", - "import mplhep as hep\n", "import matplotlib.ticker as mticker\n", + "import mplhep as hep\n", "import numpy as np\n", - "\n", - "import uproot\n", - "import awkward as ak\n", "from coffea import nanoevents\n", - "\n", - "from coffea.nanoevents.methods.base import NanoEventsArray\n", - "from coffea.analysis_tools import Weights, PackedSelection\n", - "from coffea.nanoevents.methods import nanoaod\n", - "from coffea.nanoevents.methods import vector\n", - "from coffea.lookup_tools.dense_lookup import dense_lookup\n", "from tqdm import tqdm\n", "\n", "from HHbbVV.processors.utils import pad_val\n", @@ -145,7 +138,7 @@ "Hbb = ak.pad_none(Hbb, 1, axis=1)[:, 0]\n", "HVV = ak.pad_none(HVV, 1, axis=1)[:, 0]\n", "\n", - "vs = events.GenPart[((abs(events.GenPart.pdgId) == 24)) * events.GenPart.hasFlags(GEN_FLAGS)]\n", + "vs = events.GenPart[(abs(events.GenPart.pdgId) == 24) * events.GenPart.hasFlags(GEN_FLAGS)]\n", "\n", "# vbf output quarks are always at index 4, 5\n", "gen_quarks = events.GenPart[events.GenPart.hasFlags([\"isHardProcess\"])][:, 4:6]" @@ -275,7 +268,7 @@ "outputs": [], "source": [ "# ak8 jet preselection\n", - "preselection = { # noqa: RUF012\n", + "preselection = {\n", " \"pt\": 300.0,\n", " \"eta\": 2.4,\n", " \"VVmsd\": 50,\n", @@ -479,7 +472,7 @@ "metadata": {}, "outputs": [], "source": [ - "ak4_jet_selection = { # noqa: RUF012\n", + 
"ak4_jet_selection = {\n", " \"pt\": 15,\n", " \"eta_min\": 1,\n", " \"eta_max\": 5.1,\n", diff --git a/src/HHbbVV/VBF_binder/VBFVectorTesting.ipynb b/src/HHbbVV/VBF_binder/VBFVectorTesting.ipynb index 0fb4b4e5..6396bcb4 100644 --- a/src/HHbbVV/VBF_binder/VBFVectorTesting.ipynb +++ b/src/HHbbVV/VBF_binder/VBFVectorTesting.ipynb @@ -13,10 +13,13 @@ "metadata": {}, "outputs": [], "source": [ - "import vector\n", - "import numpy as np\n", + "from __future__ import annotations\n", + "\n", "import time\n", "\n", + "import numpy as np\n", + "import vector\n", + "\n", "start_time = time.time()\n", "\n", "# Generating dummy data for vbf1\n", @@ -124,8 +127,8 @@ "metadata": {}, "outputs": [], "source": [ - "import numpy as np\n", "import matplotlib.pyplot as plt\n", + "import numpy as np\n", "\n", "\n", "def to_four_momentum(pt, phi, eta, m):\n", @@ -209,7 +212,6 @@ "outputs": [], "source": [ "import numpy as np\n", - "import matplotlib.pyplot as plt\n", "\n", "\n", "def to_four_momentum(pt, phi, eta, m):\n", @@ -339,7 +341,6 @@ "outputs": [], "source": [ "import numpy as np\n", - "import matplotlib.pyplot as plt\n", "import vector\n", "\n", "\n", @@ -506,9 +507,10 @@ "metadata": {}, "outputs": [], "source": [ - "import numpy as np\n", "import time\n", "\n", + "import numpy as np\n", + "\n", "\n", "def to_four_momentum(pt, phi, eta, m):\n", " px = pt * np.cos(phi)\n", @@ -590,8 +592,8 @@ "metadata": {}, "outputs": [], "source": [ - "import vector\n", "import numpy as np\n", + "import vector\n", "\n", "\n", "# Define a function to convert pt, eta, phi, mass to a vector object\n", diff --git a/src/HHbbVV/VBF_binder/VBFgenInfoTests.ipynb b/src/HHbbVV/VBF_binder/VBFgenInfoTests.ipynb index 4fc6a963..75af0bf9 100644 --- a/src/HHbbVV/VBF_binder/VBFgenInfoTests.ipynb +++ b/src/HHbbVV/VBF_binder/VBFgenInfoTests.ipynb @@ -7,11 +7,13 @@ "metadata": {}, "outputs": [], "source": [ - "import pandas as pd\n", + "from __future__ import annotations\n", + "\n", "import matplotlib.pyplot as plt\n", - "import mplhep as hep\n", "import matplotlib.ticker as mticker\n", + "import mplhep as hep\n", "import numpy as np\n", + "import pandas as pd\n", "\n", "plt.style.use(hep.style.CMS)\n", "hep.style.use(\"CMS\")\n", @@ -100,11 +102,10 @@ "metadata": {}, "outputs": [], "source": [ - "import pandas as pd\n", - "import numpy as np\n", + "\n", "import matplotlib.pyplot as plt\n", - "from PIL import Image\n", - "import io\n", + "import numpy as np\n", + "import pandas as pd\n", "\n", "\n", "# Define a function to calculate delta R\n", @@ -2188,8 +2189,9 @@ "outputs": [], "source": [ "import re\n", - "import numpy as np\n", + "\n", "import matplotlib.pyplot as plt\n", + "import numpy as np\n", "\n", "\n", "def parse_log_string(log_string):\n", @@ -2233,7 +2235,7 @@ "\n", "\n", "def parse_log_file(file_path):\n", - " with open(file_path, \"r\") as file:\n", + " with open(file_path) as file:\n", " lines = file.readlines()\n", "\n", " data = []\n", @@ -2455,7 +2457,7 @@ "\n", "def read_data(filename):\n", " data = []\n", - " with open(filename, \"r\") as file:\n", + " with open(filename) as file:\n", " for line in file:\n", " # Remove brackets and newline, and split by comma\n", " items = line.replace(\"[\", \"\").replace(\"]\", \"\").replace(\"\\n\", \"\").split(\",\")\n", @@ -2485,7 +2487,7 @@ "source": [ "def process_and_plot_refactored(filename):\n", " # Load the data from the file\n", - " with open(filename, \"r\") as f:\n", + " with open(filename) as f:\n", " lines = f.readlines()\n", "\n", " # Extract the delta R values 
from the lines\n", @@ -2586,7 +2588,7 @@ "\n", "def plot_2d_histograms_from_file(filename):\n", " # Load the data from the file\n", - " with open(filename, \"r\") as f:\n", + " with open(filename) as f:\n", " lines = f.readlines()\n", "\n", " # Extract the values from the lines\n", @@ -2665,7 +2667,7 @@ "source": [ "def plot_1d_histograms_from_file(filename):\n", " # Load the data from the file\n", - " with open(filename, \"r\") as f:\n", + " with open(filename) as f:\n", " lines = f.readlines()\n", "\n", " # Extract the values from the lines\n", @@ -2733,7 +2735,7 @@ "source": [ "def plot_2d_histograms_with_score(filename):\n", " # Load the data from the file\n", - " with open(filename, \"r\") as f:\n", + " with open(filename) as f:\n", " lines = f.readlines()\n", "\n", " # Extract the values from the lines\n", @@ -2790,8 +2792,8 @@ "metadata": {}, "outputs": [], "source": [ - "import numpy as np\n", "import matplotlib.pyplot as plt\n", + "import numpy as np\n", "import pandas as pd\n", "\n", "df = pd.read_parquet(\"0-30.parquet\")\n", @@ -2891,8 +2893,8 @@ "metadata": {}, "outputs": [], "source": [ - "import numpy as np\n", "import matplotlib.pyplot as plt\n", + "import numpy as np\n", "\n", "df = pd.read_parquet(\"0-30.parquet\")\n", "\n", @@ -3041,8 +3043,8 @@ "metadata": {}, "outputs": [], "source": [ - "import numpy as np # turn this into R1 R2 graphs\n", "import matplotlib.pyplot as plt\n", + "import numpy as np # turn this into R1 R2 graphs\n", "\n", "df = pd.read_parquet(\"0-30.parquet\")\n", "\n", @@ -3248,9 +3250,8 @@ "metadata": {}, "outputs": [], "source": [ - "import numpy as np\n", "import matplotlib.pyplot as plt\n", - "import re\n", + "import numpy as np\n", "\n", "\n", "def count_matched_jets(df):\n", @@ -3318,6 +3319,7 @@ { "cell_type": "code", "execution_count": null, + "id": "7fb27b941602401d91542211134fc71a", "metadata": {}, "outputs": [], "source": [] diff --git a/src/HHbbVV/VBF_binder/VBFgenselection.ipynb b/src/HHbbVV/VBF_binder/VBFgenselection.ipynb index 3cd2e5b4..d355be8e 100644 --- a/src/HHbbVV/VBF_binder/VBFgenselection.ipynb +++ b/src/HHbbVV/VBF_binder/VBFgenselection.ipynb @@ -7,20 +7,14 @@ "metadata": {}, "outputs": [], "source": [ - "import pandas as pd\n", - "import matplotlib.pyplot as plt\n", - "import mplhep as hep\n", - "import matplotlib.ticker as mticker\n", - "import numpy as np\n", + "from __future__ import annotations\n", "\n", - "import uproot\n", "import awkward as ak\n", + "import matplotlib.pyplot as plt\n", + "import matplotlib.ticker as mticker\n", + "import mplhep as hep\n", + "import pandas as pd\n", "from coffea import nanoevents\n", - "from coffea.nanoevents.methods.base import NanoEventsArray\n", - "from coffea.analysis_tools import Weights, PackedSelection\n", - "from coffea.nanoevents.methods import nanoaod\n", - "from coffea.nanoevents.methods import vector\n", - "from coffea.lookup_tools.dense_lookup import dense_lookup\n", "\n", "plt.style.use(hep.style.CMS)\n", "hep.style.use(\"CMS\")\n", @@ -71,7 +65,7 @@ " (abs(events.GenPart.pdgId) == HIGGS_PDGID) * events.GenPart.hasFlags(GEN_FLAGS)\n", "]\n", "\n", - "vbfs = events.GenPart[((abs(events.GenPart.pdgId) == 24)) * events.GenPart.hasFlags(GEN_FLAGS)]\n", + "vbfs = events.GenPart[(abs(events.GenPart.pdgId) == 24) * events.GenPart.hasFlags(GEN_FLAGS)]\n", "\n", "\n", "print(ak.sum(ak.num(events.GenPart, axis=1)))\n", @@ -172,6 +166,7 @@ { "cell_type": "code", "execution_count": null, + "id": "7fb27b941602401d91542211134fc71a", "metadata": {}, "outputs": [], "source": [ @@ -887,7 
+882,6 @@ "source": [ "import pandas as pd\n", "\n", - "\n", "# Prepare lists to populate DataFrame\n", "sort_types = []\n", "matches = []\n", @@ -1040,7 +1034,7 @@ "outputs": [], "source": [ "# Import the necessary libraries\n", - "from IPython.display import display, HTML\n", + "from IPython.display import HTML, display\n", "\n", "# Set the display options\n", "pd.set_option(\"display.max_columns\", None)\n", @@ -1291,7 +1285,6 @@ "source": [ "import pandas as pd\n", "\n", - "\n", "data_rows = []\n", "for sorttype, sortdata in output_dict.items():\n", " for true_val, truedata in sortdata.items():\n", diff --git a/src/HHbbVV/VBF_binder/VBFjetsVisualizePrototype.ipynb b/src/HHbbVV/VBF_binder/VBFjetsVisualizePrototype.ipynb index d35b53bb..b50ff7b1 100644 --- a/src/HHbbVV/VBF_binder/VBFjetsVisualizePrototype.ipynb +++ b/src/HHbbVV/VBF_binder/VBFjetsVisualizePrototype.ipynb @@ -7,11 +7,13 @@ "metadata": {}, "outputs": [], "source": [ - "import pandas as pd\n", + "from __future__ import annotations\n", + "\n", "import matplotlib.pyplot as plt\n", - "import mplhep as hep\n", "import matplotlib.ticker as mticker\n", + "import mplhep as hep\n", "import numpy as np\n", + "import pandas as pd\n", "\n", "plt.style.use(hep.style.CMS)\n", "hep.style.use(\"CMS\")\n", @@ -394,7 +396,7 @@ "\n", " # Add custom variables if provided\n", " if custom_variables:\n", - " for sort_type in variables_dict.keys():\n", + " for sort_type in variables_dict:\n", " variables_dict[sort_type] += custom_variables\n", "\n", " # Determine grid size\n", diff --git a/src/HHbbVV/VBF_binder/plot1dhistscuts.ipynb b/src/HHbbVV/VBF_binder/plot1dhistscuts.ipynb index c4481cd5..8379bca3 100644 --- a/src/HHbbVV/VBF_binder/plot1dhistscuts.ipynb +++ b/src/HHbbVV/VBF_binder/plot1dhistscuts.ipynb @@ -6,21 +6,19 @@ "metadata": {}, "outputs": [], "source": [ + "from __future__ import annotations\n", + "\n", "import sys\n", - "import os\n", "\n", "sys.path.append(\"/home/users/annava/projects/HHbbVV/src/HHbbVV/postprocessing/\")\n", "from collections import OrderedDict\n", - "import utils\n", - "import postprocessing\n", - "import pandas as pd\n", + "\n", "import matplotlib.pyplot as plt\n", - "import mplhep as hep\n", "import matplotlib.ticker as mticker\n", + "import mplhep as hep\n", "import numpy as np\n", - "import pickle\n", - "import os\n", - "import json\n", + "import postprocessing\n", + "import utils\n", "\n", "plt.style.use(hep.style.CMS)\n", "hep.style.use(\"CMS\")\n", @@ -157,7 +155,6 @@ "metadata": {}, "outputs": [], "source": [ - "import numpy as np\n", "import matplotlib.pyplot as plt\n", "\n", "\n", @@ -289,7 +286,7 @@ " overall_significance = np.round(overall_significance, -int(n - 1))\n", "\n", " # Convert to scientific notation\n", - " overall_significance_sci = \"{:e}\".format(overall_significance)\n", + " overall_significance_sci = f\"{overall_significance:e}\"\n", " ax[1].step(\n", " bins[:-1],\n", " sf * significance,\n", @@ -432,7 +429,6 @@ "metadata": {}, "outputs": [], "source": [ - "import numpy as np\n", "import matplotlib.pyplot as plt\n", "\n", "# Assuming plot_cut_histogram function is defined as before, with the additional 'title' and 'cuts' parameters\n", @@ -458,7 +454,7 @@ "for param, values in cut_parameters.items():\n", " for value in values:\n", " # Set only the current cut parameter to the linspace value, others to zero\n", - " cuts = {k: 0 for k in cut_parameters.keys()}\n", + " cuts = {k: 0 for k in cut_parameters}\n", " cuts[param] = value\n", "\n", " # Call the plot function\n", diff --git 
a/src/HHbbVV/combine/binder/BiasTest.ipynb b/src/HHbbVV/combine/binder/BiasTest.ipynb index 766fc31e..b856682b 100644 --- a/src/HHbbVV/combine/binder/BiasTest.ipynb +++ b/src/HHbbVV/combine/binder/BiasTest.ipynb @@ -6,13 +6,15 @@ "metadata": {}, "outputs": [], "source": [ - "from typing import List\n", - "import uproot\n", - "import numpy as np\n", + "from __future__ import annotations\n", + "\n", + "from pathlib import Path\n", + "\n", "import matplotlib.pyplot as plt\n", - "import mplhep as hep\n", "import matplotlib.ticker as mticker\n", - "from pathlib import Path\n", + "import mplhep as hep\n", + "import numpy as np\n", + "import uproot\n", "\n", "plt.style.use(hep.style.CMS)\n", "hep.style.use(\"CMS\")\n", @@ -324,7 +326,7 @@ "metadata": {}, "outputs": [], "source": [ - "from ipywidgets import interact, FloatSlider\n", + "from ipywidgets import FloatSlider, interact\n", "\n", "\n", "def phist(cut):\n", diff --git a/src/HHbbVV/combine/binder/FTest.ipynb b/src/HHbbVV/combine/binder/FTest.ipynb index 1610822a..432b30d4 100644 --- a/src/HHbbVV/combine/binder/FTest.ipynb +++ b/src/HHbbVV/combine/binder/FTest.ipynb @@ -6,16 +6,20 @@ "metadata": {}, "outputs": [], "source": [ + "from __future__ import annotations\n", + "\n", + "from pathlib import Path\n", "from typing import List\n", - "import uproot\n", - "import numpy as np\n", + "\n", "import matplotlib.pyplot as plt\n", - "import mplhep as hep\n", "import matplotlib.ticker as mticker\n", - "from pathlib import Path\n", - "from HHbbVV.postprocessing import utils\n", + "import mplhep as hep\n", + "import numpy as np\n", + "import uproot\n", "from scipy import stats\n", "\n", + "from HHbbVV.postprocessing import utils\n", + "\n", "plt.style.use(hep.style.CMS)\n", "hep.style.use(\"CMS\")\n", "formatter = mticker.ScalarFormatter(useMathText=True)\n", @@ -105,7 +109,7 @@ " )\n", " tdict[\"data\"][tflabel] = file[\"limit\"][0]\n", "\n", - " if not nTF == o1:\n", + " if nTF != o1:\n", " tdict[\"ftoys\"][tflabel] = F_statistic(\n", " tdict[\"toys\"][tlabel], tdict[\"toys\"][tflabel], o1, nTF\n", " )\n", diff --git a/src/HHbbVV/combine/binder/GoF.ipynb b/src/HHbbVV/combine/binder/GoF.ipynb index cb090c0f..a8c0d95f 100644 --- a/src/HHbbVV/combine/binder/GoF.ipynb +++ b/src/HHbbVV/combine/binder/GoF.ipynb @@ -6,13 +6,16 @@ "metadata": {}, "outputs": [], "source": [ + "from __future__ import annotations\n", + "\n", + "import os\n", "from typing import List\n", - "import uproot\n", - "import numpy as np\n", + "\n", "import matplotlib.pyplot as plt\n", - "import mplhep as hep\n", "import matplotlib.ticker as mticker\n", - "import os\n", + "import mplhep as hep\n", + "import numpy as np\n", + "import uproot\n", "\n", "plt.style.use(hep.style.CMS)\n", "hep.style.use(\"CMS\")\n", diff --git a/src/HHbbVV/combine/binder/InterpolateLimits.ipynb b/src/HHbbVV/combine/binder/InterpolateLimits.ipynb index 7c70a268..85a80996 100644 --- a/src/HHbbVV/combine/binder/InterpolateLimits.ipynb +++ b/src/HHbbVV/combine/binder/InterpolateLimits.ipynb @@ -6,19 +6,18 @@ "metadata": {}, "outputs": [], "source": [ - "import pickle\n", + "from __future__ import annotations\n", + "\n", "import os\n", "from pathlib import Path\n", "\n", - "import pandas as pd\n", - "import numpy as np\n", - "from scipy import interpolate\n", - "\n", "import matplotlib\n", "import matplotlib.pyplot as plt\n", - "import mplhep as hep\n", "import matplotlib.ticker as mticker\n", - "\n", + "import mplhep as hep\n", + "import numpy as np\n", + "import pandas as pd\n", + "from scipy import 
interpolate\n", "from tqdm import tqdm\n", "\n", "plt.style.use(hep.style.CMS)\n", @@ -80,7 +79,7 @@ " limits_path = f\"{cards_dir}/{sample}/AsymptoticLimits.txt\"\n", " if os.path.exists(limits_path):\n", " mx, my = mxmy(sample)\n", - " with open(limits_path, \"r\") as f:\n", + " with open(limits_path) as f:\n", " lines = f.readlines()\n", "\n", " nums = 0\n", @@ -198,7 +197,7 @@ " label = (\n", " f\"{key}% expected exclusion limits (fb)\"\n", " if key != \"50.0\"\n", - " else f\"Median expected exclusion limits (fb)\"\n", + " else \"Median expected exclusion limits (fb)\"\n", " )\n", " colormesh(xx, yy, grid, label, f\"{plot_dir}/mesh_{key}_turbo.pdf\")" ] diff --git a/src/HHbbVV/combine/binder/PlotScan.ipynb b/src/HHbbVV/combine/binder/PlotScan.ipynb index 71d79211..69bcc24d 100644 --- a/src/HHbbVV/combine/binder/PlotScan.ipynb +++ b/src/HHbbVV/combine/binder/PlotScan.ipynb @@ -6,13 +6,12 @@ "metadata": {}, "outputs": [], "source": [ - "import pandas as pd\n", - "import numpy as np\n", + "from __future__ import annotations\n", "\n", "import matplotlib.pyplot as plt\n", - "import mplhep as hep\n", "import matplotlib.ticker as mticker\n", - "from matplotlib.colors import LogNorm\n", + "import mplhep as hep\n", + "import numpy as np\n", "\n", "plt.rcParams.update({\"font.size\": 16})\n", "plt.style.use(hep.style.CMS)\n", @@ -20,14 +19,12 @@ "formatter = mticker.ScalarFormatter(useMathText=True)\n", "formatter.set_powerlimits((-3, 3))\n", "\n", - "from typing import List\n", + "import itertools\n", "import os\n", + "import warnings\n", "from pathlib import Path\n", "\n", "from tqdm import tqdm\n", - "import itertools\n", - "\n", - "import warnings\n", "\n", "warnings.filterwarnings(\"ignore\", message=\"invalid value encountered in log10*\")" ] @@ -161,7 +158,7 @@ " limits_path = cards_dir / cutstr / sample / \"AsymptoticLimits.txt\"\n", "\n", " if os.path.exists(limits_path):\n", - " with open(limits_path, \"r\") as f:\n", + " with open(limits_path) as f:\n", " lines = f.readlines()\n", "\n", " nums = 0\n", diff --git a/src/HHbbVV/corrections/TriggerSFs.ipynb b/src/HHbbVV/corrections/TriggerSFs.ipynb index 0a06b463..9b2fffda 100644 --- a/src/HHbbVV/corrections/TriggerSFs.ipynb +++ b/src/HHbbVV/corrections/TriggerSFs.ipynb @@ -6,16 +6,17 @@ "metadata": {}, "outputs": [], "source": [ - "import awkward as ak\n", - "import numpy as np\n", - "import hist\n", - "from hist import Hist\n", - "import matplotlib.pyplot as plt\n", - "import mplhep as hep\n", - "import pickle\n", + "from __future__ import annotations\n", + "\n", "import math\n", "import os\n", + "import pickle\n", + "\n", + "import matplotlib.pyplot as plt\n", + "import mplhep as hep\n", + "import numpy as np\n", "from hist.intervals import clopper_pearson_interval\n", + "\n", "import HHbbVV.common_utils as utils\n", "from HHbbVV import hh_vars\n", "\n", diff --git a/src/HHbbVV/corrections/checkJMR.ipynb b/src/HHbbVV/corrections/checkJMR.ipynb index 9efaff44..0b6d1889 100644 --- a/src/HHbbVV/corrections/checkJMR.ipynb +++ b/src/HHbbVV/corrections/checkJMR.ipynb @@ -6,10 +6,12 @@ "metadata": {}, "outputs": [], "source": [ - "import numpy as np\n", + "from __future__ import annotations\n", + "\n", "import matplotlib.pyplot as plt\n", "import matplotlib.ticker as mticker\n", "import mplhep as hep\n", + "import numpy as np\n", "\n", "plt.style.use(hep.style.CMS)\n", "hep.style.use(\"CMS\")\n", diff --git a/src/HHbbVV/corrections/corrections.ipynb b/src/HHbbVV/corrections/corrections.ipynb index ce59e254..7d4d40a9 100644 --- 
a/src/HHbbVV/corrections/corrections.ipynb +++ b/src/HHbbVV/corrections/corrections.ipynb @@ -6,6 +6,8 @@ "metadata": {}, "outputs": [], "source": [ + "from __future__ import annotations\n", + "\n", "import pickle" ] }, @@ -90,7 +92,8 @@ "metadata": {}, "outputs": [], "source": [ - "import json, gzip\n", + "import gzip\n", + "import json\n", "\n", "with gzip.open(get_pog_json(\"jec\", \"2017\"), \"r\") as fin:\n", " jec = json.loads(fin.read().decode(\"utf-8\"))\n", diff --git a/src/HHbbVV/postprocessing/BDT_LPSFs.ipynb b/src/HHbbVV/postprocessing/BDT_LPSFs.ipynb index 9b60b59e..28ac1f92 100644 --- a/src/HHbbVV/postprocessing/BDT_LPSFs.ipynb +++ b/src/HHbbVV/postprocessing/BDT_LPSFs.ipynb @@ -6,27 +6,27 @@ "metadata": {}, "outputs": [], "source": [ - "import utils\n", - "import plotting\n", - "import postprocessing\n", - "import numpy as np\n", + "from __future__ import annotations\n", + "\n", + "import os\n", "import warnings\n", + "\n", + "import numpy as np\n", "import pandas as pd\n", + "import postprocessing\n", + "import utils\n", + "from hh_vars import samples, sig_key\n", "from pandas.errors import SettingWithCopyWarning\n", - "from hh_vars import samples, sig_key, qcd_key, data_key\n", - "import os\n", "from utils import CUT_MAX_VAL\n", "\n", "# ignore these because they don't seem to apply\n", "warnings.simplefilter(action=\"ignore\", category=SettingWithCopyWarning)\n", "\n", - "from PyPDF2 import PdfFileMerger\n", "\n", - "from copy import deepcopy\n", "\n", "import matplotlib.pyplot as plt\n", - "import mplhep as hep\n", "import matplotlib.ticker as mticker\n", + "import mplhep as hep\n", "\n", "plt.style.use(hep.style.CMS)\n", "hep.style.use(\"CMS\")\n", @@ -309,16 +309,16 @@ " \"syst_rat_unc\": np.minimum(nom_vals, (np.abs(sys_up_down[0] - sys_up_down[1])) / 2) / nom_vals,\n", "}\n", "\n", - "tot_matched = np.sum(np.sum(events[f\"ak8FatJetHVV\"].astype(bool)))\n", + "tot_matched = np.sum(np.sum(events[\"ak8FatJetHVV\"].astype(bool)))\n", "\n", "# fraction of subjets > 350 * 0.21 measured by CASE\n", - "uncs[\"sj_pt_unc\"] = (np.sum(events[f\"VV_lp_sf_num_sjpt_gt350\"][0]) / tot_matched) * 0.21\n", + "uncs[\"sj_pt_unc\"] = (np.sum(events[\"VV_lp_sf_num_sjpt_gt350\"][0]) / tot_matched) * 0.21\n", "\n", "num_prongs = events[\"ak8FatJetHVVNumProngs\"][0]\n", "\n", - "sj_matching_unc = np.sum(events[f\"VV_lp_sf_double_matched_event\"][0])\n", + "sj_matching_unc = np.sum(events[\"VV_lp_sf_double_matched_event\"][0])\n", "for nump in range(2, 5):\n", - " sj_matching_unc += np.sum(events[f\"VV_lp_sf_unmatched_quarks\"][0][num_prongs == nump]) / nump\n", + " sj_matching_unc += np.sum(events[\"VV_lp_sf_unmatched_quarks\"][0][num_prongs == nump]) / nump\n", "\n", "uncs[\"sj_matching_unc\"] = sj_matching_unc / tot_matched" ] diff --git a/src/HHbbVV/postprocessing/CombineTemplates.ipynb b/src/HHbbVV/postprocessing/CombineTemplates.ipynb index e056efce..04a052ba 100644 --- a/src/HHbbVV/postprocessing/CombineTemplates.ipynb +++ b/src/HHbbVV/postprocessing/CombineTemplates.ipynb @@ -17,9 +17,12 @@ "metadata": {}, "outputs": [], "source": [ - "import pickle\n", + "from __future__ import annotations\n", + "\n", "import json\n", + "import pickle\n", "from pathlib import Path\n", + "\n", "from HHbbVV.hh_vars import years" ] }, @@ -167,13 +170,18 @@ "metadata": {}, "outputs": [], "source": [ + "import warnings\n", + "from pathlib import Path\n", + "\n", + "import datacardHelpers\n", "import hist\n", + "import plotting\n", + "import postprocessing\n", + "import regions\n", + "import utils\n", 
"from hist import Hist\n", - "import numpy as np\n", - "from HHbbVV.hh_vars import jecs, jmsr, bg_keys, qcd_key, LUMI\n", - "import utils, plotting, postprocessing, regions, datacardHelpers\n", - "import warnings\n", - "from pathlib import Path" + "\n", + "from HHbbVV.hh_vars import bg_keys, jecs, jmsr, qcd_key" ] }, { @@ -184,7 +192,7 @@ "source": [ "MAIN_DIR = Path(\"../../../\")\n", "\n", - "main_plot_dir = MAIN_DIR / f\"plots/PostProcessing/24Apr12NonresCombinedggFOR/Templates\"" + "main_plot_dir = MAIN_DIR / \"plots/PostProcessing/24Apr12NonresCombinedggFOR/Templates\"" ] }, { @@ -219,7 +227,6 @@ "metadata": {}, "outputs": [], "source": [ - "import matplotlib as mpl\n", "import matplotlib.pyplot as plt\n", "import matplotlib.ticker as mticker\n", "import mplhep as hep\n", diff --git a/src/HHbbVV/postprocessing/GetEventDisplayJets.ipynb b/src/HHbbVV/postprocessing/GetEventDisplayJets.ipynb index 22afd99c..9248921a 100644 --- a/src/HHbbVV/postprocessing/GetEventDisplayJets.ipynb +++ b/src/HHbbVV/postprocessing/GetEventDisplayJets.ipynb @@ -6,11 +6,12 @@ "metadata": {}, "outputs": [], "source": [ + "from __future__ import annotations\n", + "\n", + "import numpy as np\n", "import uproot\n", - "from tqdm import tqdm\n", "from coffea import nanoevents\n", - "from coffea.nanoevents.methods.base import NanoEventsArray\n", - "import numpy as np" + "from tqdm import tqdm" ] }, { diff --git a/src/HHbbVV/postprocessing/InferenceAnalysis.ipynb b/src/HHbbVV/postprocessing/InferenceAnalysis.ipynb index f6161a31..ca6f6e76 100644 --- a/src/HHbbVV/postprocessing/InferenceAnalysis.ipynb +++ b/src/HHbbVV/postprocessing/InferenceAnalysis.ipynb @@ -16,26 +16,32 @@ "metadata": {}, "outputs": [], "source": [ - "import numpy as np\n", - "import pandas as pd\n", + "from __future__ import annotations\n", + "\n", "import matplotlib.pyplot as plt\n", "import mplhep as hep\n", - "import matplotlib\n", + "import numpy as np\n", + "import pandas as pd\n", "\n", "plt.style.use(hep.style.CMS)\n", "hep.style.use(\"CMS\")\n", "plt.rcParams.update({\"font.size\": 24})\n", "\n", - "import os\n", - "from os import listdir\n", - "from os.path import exists\n", "import pickle\n", "from pathlib import Path\n", "\n", + "import plotting\n", + "import postprocessing\n", + "import utils\n", "from tqdm import tqdm\n", "\n", - "import utils, postprocessing, plotting\n", - "from HHbbVV.hh_vars import samples, nonres_samples, res_samples, nonres_sig_keys, res_sig_keys\n", + "from HHbbVV.hh_vars import (\n", + " nonres_samples,\n", + " nonres_sig_keys,\n", + " res_samples,\n", + " res_sig_keys,\n", + " samples,\n", + ")\n", "\n", "MAIN_DIR = Path(\"../../../\")\n", "\n", @@ -93,7 +99,6 @@ "source": [ "from collections import OrderedDict\n", "\n", - "\n", "res_samples = OrderedDict()\n", "\n", "res_mps = [\n", @@ -315,8 +320,8 @@ "metadata": {}, "outputs": [], "source": [ - "from sklearn.metrics import roc_curve, auc\n", "from scipy import integrate\n", + "from sklearn.metrics import auc, roc_curve\n", "\n", "rocs = {}\n", "# sig_key = \"HHbbVV\"\n", diff --git a/src/HHbbVV/postprocessing/InterpolateSignal.ipynb b/src/HHbbVV/postprocessing/InterpolateSignal.ipynb index 1b3a5be3..fa3036d2 100644 --- a/src/HHbbVV/postprocessing/InterpolateSignal.ipynb +++ b/src/HHbbVV/postprocessing/InterpolateSignal.ipynb @@ -6,27 +6,22 @@ "metadata": {}, "outputs": [], "source": [ - "import pickle, json, gzip\n", - "import numpy as np\n", - "import hist\n", - "from hist import Hist\n", + "from __future__ import annotations\n", "\n", - "from typing 
import Optional, List, Dict\n", - "from copy import copy\n", + "import pickle\n", + "from pathlib import Path\n", "\n", + "import hist\n", "import matplotlib.pyplot as plt\n", "import mplhep as hep\n", - "from matplotlib import colors\n", - "\n", + "import numpy as np\n", + "import plotting\n", + "from hist import Hist\n", + "from postprocessing import nonres_shape_vars as shape_vars\n", "from tqdm import tqdm\n", "\n", - "from pathlib import Path\n", - "import os\n", - "\n", - "from HHbbVV.hh_vars import years, bg_keys\n", + "from HHbbVV.hh_vars import years\n", "from HHbbVV.postprocessing import datacardHelpers\n", - "from postprocessing import nonres_shape_vars as shape_vars\n", - "import plotting\n", "\n", "plt.rcParams.update({\"font.size\": 16})\n", "plt.style.use(hep.style.CMS)" @@ -153,7 +148,7 @@ ")\n", "\n", "# the vector of symbolic sample cross sections\n", - "s = sympy.Matrix([[sympy.Symbol(\"xs{}\".format(i))] for i in range(len(csamples))])\n", + "s = sympy.Matrix([[sympy.Symbol(f\"xs{i}\")] for i in range(len(csamples))])\n", "\n", "# actual computation, i.e., matrix inversion and multiplications with vectors\n", "M_inv = M.pinv()\n", diff --git a/src/HHbbVV/postprocessing/NormTest.ipynb b/src/HHbbVV/postprocessing/NormTest.ipynb index ac25fc72..8dd58961 100644 --- a/src/HHbbVV/postprocessing/NormTest.ipynb +++ b/src/HHbbVV/postprocessing/NormTest.ipynb @@ -6,17 +6,18 @@ "metadata": {}, "outputs": [], "source": [ - "import pandas as pd\n", + "from __future__ import annotations\n", + "\n", "import pickle\n", + "import warnings\n", + "from os import listdir\n", + "from pathlib import Path\n", + "from typing import Dict, List\n", + "\n", "import matplotlib.pyplot as plt\n", "import numpy as np\n", - "import hist\n", - "import uproot\n", - "import warnings\n", - "from typing import List, Union, Dict\n", + "import pandas as pd\n", "import utils\n", - "from pathlib import Path\n", - "from os import listdir\n", "from hh_vars import norm_preserving_weights" ] }, @@ -150,7 +151,7 @@ " events[f\"weight_{wlabel}\"] /= totals[f\"np_{wlabel}\"]\n", " else:\n", " # normalize by the nominal\n", - " events[f\"weight_{wlabel}\"] /= totals[f\"np_nominal\"]\n", + " events[f\"weight_{wlabel}\"] /= totals[\"np_nominal\"]\n", "\n", " # normalize scale and PDF weights\n", " for wkey in [\"scale_weights\", \"pdf_weights\"]:\n", @@ -257,7 +258,7 @@ "\n", "for column in events:\n", " if \"weight\" in column[0] or \"Weight\" in column[0]:\n", - " print(f\"{str(column):<50} {np.sum(events[column]):.3f}\")" + " print(f\"{column!s:<50} {np.sum(events[column]):.3f}\")" ] }, { @@ -326,7 +327,7 @@ "metadata": {}, "outputs": [], "source": [ - "get_pickles(f\"../../../tmp/test_outputs/2017/hhbbvv/pickles\", \"2017\", \"GluGluToHHTobbVV_node_cHHH1\")" + "get_pickles(\"../../../tmp/test_outputs/2017/hhbbvv/pickles\", \"2017\", \"GluGluToHHTobbVV_node_cHHH1\")" ] }, { diff --git a/src/HHbbVV/postprocessing/PlotFits.ipynb b/src/HHbbVV/postprocessing/PlotFits.ipynb index 60ef0376..c1a0fea4 100644 --- a/src/HHbbVV/postprocessing/PlotFits.ipynb +++ b/src/HHbbVV/postprocessing/PlotFits.ipynb @@ -6,23 +6,23 @@ "metadata": {}, "outputs": [], "source": [ - "from collections import OrderedDict\n", + "from __future__ import annotations\n", "\n", - "import uproot\n", - "import numpy as np\n", - "import matplotlib.pyplot as plt\n", "import pickle\n", + "from collections import OrderedDict\n", + "from pathlib import Path\n", "\n", "import hist\n", - "from hist import Hist\n", - "\n", + "import matplotlib.pyplot as plt\n", + 
"import numpy as np\n", "import plotting\n", - "from HHbbVV.hh_vars import data_key, years, bg_keys, nonres_sig_keys\n", - "from postprocessing import nonres_shape_vars, get_nonres_selection_regions\n", + "import uproot\n", "from datacardHelpers import sum_templates\n", - "from HHbbVV.postprocessing import utils\n", + "from hist import Hist\n", + "from postprocessing import nonres_shape_vars\n", "\n", - "from pathlib import Path" + "from HHbbVV.hh_vars import bg_keys, data_key, years\n", + "from HHbbVV.postprocessing import utils" ] }, { @@ -49,7 +49,7 @@ "k2v0sig = False\n", "unblinded = True\n", "\n", - "plot_dir = MAIN_DIR / f\"plots/PostFit/24Jul18Fig8Prelim\"\n", + "plot_dir = MAIN_DIR / \"plots/PostFit/24Jul18Fig8Prelim\"\n", "# plot_dir = (\n", "# MAIN_DIR\n", "# / \"plots/PostFit/24Apr9ggFScan/nTF1/ggf_txbb_MP_ggf_bdt_0.9965_vbf_txbb_HP_vbf_bdt_0.999_lepton_veto_Hbb\"\n", @@ -81,7 +81,7 @@ "outputs": [], "source": [ "# templates_dir = Path(f\"templates/{cards_dir}\")\n", - "templates_dir = Path(f\"templates/24Apr26NonresBDT995AllSigs\")\n", + "templates_dir = Path(\"templates/24Apr26NonresBDT995AllSigs\")\n", "# templates_dir = Path(\n", "# f\"templates/24Apr9ggFScan/ggf_txbb_HP_ggf_bdt_0.996_vbf_txbb_HP_vbf_bdt_0.999_lepton_veto_Hbb\"\n", "# )\n", @@ -275,8 +275,6 @@ "metadata": {}, "outputs": [], "source": [ - "import matplotlib as mpl\n", - "import matplotlib.pyplot as plt\n", "import matplotlib.ticker as mticker\n", "import mplhep as hep\n", "\n", diff --git a/src/HHbbVV/postprocessing/PlotFitsFitd.ipynb b/src/HHbbVV/postprocessing/PlotFitsFitd.ipynb index cdfd031f..ce0769dd 100644 --- a/src/HHbbVV/postprocessing/PlotFitsFitd.ipynb +++ b/src/HHbbVV/postprocessing/PlotFitsFitd.ipynb @@ -6,23 +6,23 @@ "metadata": {}, "outputs": [], "source": [ - "from collections import OrderedDict\n", + "from __future__ import annotations\n", "\n", - "import uproot\n", - "import numpy as np\n", - "import matplotlib.pyplot as plt\n", "import pickle\n", + "from collections import OrderedDict\n", + "from pathlib import Path\n", "\n", "import hist\n", - "from hist import Hist\n", - "\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", "import plotting\n", - "from HHbbVV.hh_vars import data_key, years, bg_keys, nonres_sig_keys\n", - "from postprocessing import nonres_shape_vars, get_nonres_selection_regions\n", + "import uproot\n", "from datacardHelpers import sum_templates\n", - "from HHbbVV.postprocessing import utils\n", + "from hist import Hist\n", + "from postprocessing import nonres_shape_vars\n", "\n", - "from pathlib import Path" + "from HHbbVV.hh_vars import bg_keys, data_key, years\n", + "from HHbbVV.postprocessing import utils" ] }, { @@ -49,7 +49,7 @@ "k2v0sig = False\n", "unblinded = True\n", "\n", - "plot_dir = MAIN_DIR / f\"plots/PostFit/24Jul18Fig8Prelim\"\n", + "plot_dir = MAIN_DIR / \"plots/PostFit/24Jul18Fig8Prelim\"\n", "# plot_dir = (\n", "# MAIN_DIR\n", "# / \"plots/PostFit/24Apr9ggFScan/nTF1/ggf_txbb_MP_ggf_bdt_0.9965_vbf_txbb_HP_vbf_bdt_0.999_lepton_veto_Hbb\"\n", @@ -81,7 +81,7 @@ "outputs": [], "source": [ "# templates_dir = Path(f\"templates/{cards_dir}\")\n", - "templates_dir = Path(f\"templates/24Apr26NonresBDT995AllSigs\")\n", + "templates_dir = Path(\"templates/24Apr26NonresBDT995AllSigs\")\n", "# templates_dir = Path(\n", "# f\"templates/24Apr9ggFScan/ggf_txbb_HP_ggf_bdt_0.996_vbf_txbb_HP_vbf_bdt_0.999_lepton_veto_Hbb\"\n", "# )\n", @@ -275,8 +275,6 @@ "metadata": {}, "outputs": [], "source": [ - "import matplotlib as mpl\n", - "import 
matplotlib.pyplot as plt\n", "import matplotlib.ticker as mticker\n", "import mplhep as hep\n", "\n", diff --git a/src/HHbbVV/postprocessing/PlotFitsRes.ipynb b/src/HHbbVV/postprocessing/PlotFitsRes.ipynb index 6ff340a3..e1dafaac 100644 --- a/src/HHbbVV/postprocessing/PlotFitsRes.ipynb +++ b/src/HHbbVV/postprocessing/PlotFitsRes.ipynb @@ -6,20 +6,19 @@ "metadata": {}, "outputs": [], "source": [ - "from collections import OrderedDict\n", + "from __future__ import annotations\n", "\n", - "import uproot\n", - "import numpy as np\n", - "import matplotlib.pyplot as plt\n", + "import os\n", + "from collections import OrderedDict\n", "\n", "import hist\n", - "from hist import Hist\n", - "\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", "import plotting\n", + "import uproot\n", "from hh_vars import data_key\n", - "from postprocessing import res_shape_vars, get_res_selection_regions\n", - "\n", - "import os" + "from hist import Hist\n", + "from postprocessing import res_shape_vars" ] }, { diff --git a/src/HHbbVV/postprocessing/PostProcess.ipynb b/src/HHbbVV/postprocessing/PostProcess.ipynb index 4ea4476c..2a304559 100644 --- a/src/HHbbVV/postprocessing/PostProcess.ipynb +++ b/src/HHbbVV/postprocessing/PostProcess.ipynb @@ -13,43 +13,35 @@ "metadata": {}, "outputs": [], "source": [ - "import utils\n", + "from __future__ import annotations\n", + "\n", + "import os\n", + "import pickle\n", + "import warnings\n", + "\n", + "import hist\n", + "import matplotlib.pyplot as plt\n", + "import matplotlib.ticker as mticker\n", + "import mplhep as hep\n", + "import numpy as np\n", + "import pandas as pd\n", "import plotting\n", "import postprocessing\n", - "import corrections\n", - "from collections import OrderedDict\n", - "\n", + "import utils\n", + "from hist import Hist\n", + "from pandas.errors import SettingWithCopyWarning\n", + "from postprocessing import nonres_shape_vars\n", "from utils import CUT_MAX_VAL, ShapeVar\n", + "\n", "from HHbbVV.hh_vars import (\n", - " years,\n", - " data_key,\n", - " qcd_key,\n", " bg_keys,\n", - " samples,\n", - " nonres_sig_keys,\n", - " nonres_samples,\n", - " txbb_wps,\n", + " data_key,\n", " jec_shifts,\n", " jmsr_shifts,\n", - " LUMI,\n", + " nonres_samples,\n", + " samples,\n", + " years,\n", ")\n", - "from postprocessing import nonres_shape_vars\n", - "\n", - "import numpy as np\n", - "import pandas as pd\n", - "import pickle\n", - "from pandas.errors import SettingWithCopyWarning\n", - "import hist\n", - "from hist import Hist\n", - "\n", - "import os\n", - "from copy import deepcopy\n", - "from inspect import cleandoc\n", - "import warnings\n", - "\n", - "import matplotlib.pyplot as plt\n", - "import mplhep as hep\n", - "import matplotlib.ticker as mticker\n", "\n", "plt.style.use(hep.style.CMS)\n", "hep.style.use(\"CMS\")\n", diff --git a/src/HHbbVV/postprocessing/PostProcessRes.ipynb b/src/HHbbVV/postprocessing/PostProcessRes.ipynb index a00518ae..41078251 100644 --- a/src/HHbbVV/postprocessing/PostProcessRes.ipynb +++ b/src/HHbbVV/postprocessing/PostProcessRes.ipynb @@ -6,42 +6,29 @@ "metadata": {}, "outputs": [], "source": [ - "import utils\n", - "import plotting\n", - "import postprocessing\n", - "import corrections\n", - "\n", - "from utils import CUT_MAX_VAL, ShapeVar\n", - "from HHbbVV.hh_vars import (\n", - " years,\n", - " data_key,\n", - " qcd_key,\n", - " bg_keys,\n", - " samples,\n", - " nonres_sig_keys,\n", - " # res_samples,\n", - " # res_sig_keys,\n", - " nonres_samples,\n", - " txbb_wps,\n", - " jec_shifts,\n", - " 
jmsr_shifts,\n", - " LUMI,\n", - ")\n", - "from postprocessing import res_shape_vars, load_filters\n", + "from __future__ import annotations\n", "\n", + "import json\n", + "import os\n", + "import pickle\n", "from collections import OrderedDict\n", + "from pathlib import Path\n", "\n", "import numpy as np\n", "import pandas as pd\n", - "import pickle, json\n", - "import hist\n", - "from hist import Hist\n", + "import plotting\n", + "import postprocessing\n", + "import utils\n", + "from postprocessing import load_filters\n", + "from utils import CUT_MAX_VAL, ShapeVar\n", "\n", - "import os\n", - "from pathlib import Path\n", - "from copy import deepcopy\n", - "from inspect import cleandoc\n", - "import warnings" + "from HHbbVV.hh_vars import (\n", + " bg_keys,\n", + " nonres_samples,\n", + " nonres_sig_keys,\n", + " samples,\n", + " years,\n", + ")" ] }, { @@ -156,7 +143,6 @@ "metadata": {}, "outputs": [], "source": [ - "import matplotlib as mpl\n", "import matplotlib.pyplot as plt\n", "import matplotlib.ticker as mticker\n", "import mplhep as hep\n", @@ -278,7 +264,7 @@ "metadata": {}, "outputs": [], "source": [ - "with (MAIN_DIR / f\"plots/PostProcessing/24Mar6Mass/ControlPlots/2016/hists.pkl\").open(\"rb\") as f:\n", + "with (MAIN_DIR / \"plots/PostProcessing/24Mar6Mass/ControlPlots/2016/hists.pkl\").open(\"rb\") as f:\n", " hists2 = pickle.load(f)" ] }, @@ -341,6 +327,7 @@ "outputs": [], "source": [ "from collections import OrderedDict\n", + "\n", "from tqdm import tqdm\n", "\n", "sel, cf = utils.make_selection(\n", @@ -495,7 +482,7 @@ "metadata": {}, "outputs": [], "source": [ - "with open(f\"templates/Apr10//2017_templates.pkl\", \"rb\") as f:\n", + "with open(\"templates/Apr10//2017_templates.pkl\", \"rb\") as f:\n", " templates = pickle.load(f)" ] }, diff --git a/src/HHbbVV/postprocessing/PostProcessResOld.ipynb b/src/HHbbVV/postprocessing/PostProcessResOld.ipynb index ac9aa5c0..3c3fd388 100644 --- a/src/HHbbVV/postprocessing/PostProcessResOld.ipynb +++ b/src/HHbbVV/postprocessing/PostProcessResOld.ipynb @@ -6,41 +6,34 @@ "metadata": {}, "outputs": [], "source": [ - "import utils\n", + "from __future__ import annotations\n", + "\n", + "import json\n", + "import os\n", + "import pickle\n", + "from collections import OrderedDict\n", + "\n", + "import hist\n", + "import numpy as np\n", + "import pandas as pd\n", "import plotting\n", "import postprocessing\n", - "import corrections\n", - "\n", - "from utils import CUT_MAX_VAL, ShapeVar\n", + "import utils\n", "from hh_vars import (\n", - " years,\n", - " data_key,\n", - " qcd_key,\n", + " LUMI,\n", " bg_keys,\n", - " samples,\n", - " nonres_sig_keys,\n", + " jec_shifts,\n", + " jmsr_shifts,\n", " # res_samples,\n", " # res_sig_keys,\n", " nonres_samples,\n", - " txbb_wps,\n", - " jec_shifts,\n", - " jmsr_shifts,\n", - " LUMI,\n", + " nonres_sig_keys,\n", + " samples,\n", + " years,\n", ")\n", - "from postprocessing import res_shape_vars, new_filters, old_filters\n", - "\n", - "from collections import OrderedDict\n", - "\n", - "import numpy as np\n", - "import pandas as pd\n", - "import pickle, json\n", - "import hist\n", "from hist import Hist\n", - "\n", - "import os\n", - "from copy import deepcopy\n", - "from inspect import cleandoc\n", - "import warnings" + "from postprocessing import new_filters, res_shape_vars\n", + "from utils import CUT_MAX_VAL, ShapeVar" ] }, { @@ -261,7 +254,6 @@ "metadata": {}, "outputs": [], "source": [ - "import matplotlib\n", "import matplotlib.pyplot as plt\n", "import mplhep as hep\n", "\n", @@ -359,7 
+351,7 @@ " events_dict,\n", " bb_masks,\n", ")\n", - "cutstr = f\"pass_noveto\"\n", + "cutstr = \"pass_noveto\"\n", "\n", "postprocessing.control_plots(\n", " events_dict,\n", @@ -430,6 +422,7 @@ "outputs": [], "source": [ "from collections import OrderedDict\n", + "\n", "from tqdm import tqdm\n", "\n", "sel, cf = utils.make_selection(\n", @@ -596,7 +589,7 @@ "metadata": {}, "outputs": [], "source": [ - "with open(f\"templates/Apr10//2017_templates.pkl\", \"rb\") as f:\n", + "with open(\"templates/Apr10//2017_templates.pkl\", \"rb\") as f:\n", " templates = pickle.load(f)" ] }, diff --git a/src/HHbbVV/postprocessing/PostProcessVBF.ipynb b/src/HHbbVV/postprocessing/PostProcessVBF.ipynb index 643cca60..7fed80f0 100644 --- a/src/HHbbVV/postprocessing/PostProcessVBF.ipynb +++ b/src/HHbbVV/postprocessing/PostProcessVBF.ipynb @@ -6,42 +6,25 @@ "metadata": {}, "outputs": [], "source": [ - "import utils\n", - "import plotting\n", - "import postprocessing\n", - "import corrections\n", - "from collections import OrderedDict\n", + "from __future__ import annotations\n", "\n", - "from utils import CUT_MAX_VAL, ShapeVar\n", + "import os\n", + "\n", + "import matplotlib.pyplot as plt\n", + "import matplotlib.ticker as mticker\n", + "import mplhep as hep\n", + "import pandas as pd\n", + "import postprocessing\n", + "import utils\n", "from hh_vars import (\n", - " years,\n", - " data_key,\n", - " qcd_key,\n", " bg_keys,\n", - " samples,\n", - " nonres_sig_keys,\n", " nonres_samples,\n", - " txbb_wps,\n", - " jec_shifts,\n", - " jmsr_shifts,\n", + " nonres_sig_keys,\n", + " samples,\n", ")\n", - "from postprocessing import nonres_shape_vars, Region\n", - "\n", - "import numpy as np\n", - "import pandas as pd\n", - "import pickle\n", "\n", "# from pandas.errors import SettingWithCopyWarning\n", - "from hist import Hist\n", - "\n", - "import os\n", - "from copy import deepcopy\n", - "from inspect import cleandoc\n", - "import warnings\n", - "\n", - "import matplotlib.pyplot as plt\n", - "import mplhep as hep\n", - "import matplotlib.ticker as mticker\n", + "from utils import CUT_MAX_VAL, ShapeVar\n", "\n", "plt.style.use(hep.style.CMS)\n", "hep.style.use(\"CMS\")\n", diff --git a/src/HHbbVV/postprocessing/PostProcessVBFtesting.ipynb b/src/HHbbVV/postprocessing/PostProcessVBFtesting.ipynb index d10e274d..9ea5979b 100644 --- a/src/HHbbVV/postprocessing/PostProcessVBFtesting.ipynb +++ b/src/HHbbVV/postprocessing/PostProcessVBFtesting.ipynb @@ -6,43 +6,27 @@ "metadata": {}, "outputs": [], "source": [ - "import utils\n", - "import plotting\n", - "import postprocessing\n", - "import corrections\n", - "from collections import OrderedDict\n", + "from __future__ import annotations\n", "\n", - "from utils import CUT_MAX_VAL, ShapeVar\n", + "import os\n", + "\n", + "import matplotlib.pyplot as plt\n", + "import matplotlib.ticker as mticker\n", + "import mplhep as hep\n", + "import numpy as np\n", + "import pandas as pd\n", + "import postprocessing\n", + "import utils\n", + "import vector\n", "from hh_vars import (\n", - " years,\n", - " data_key,\n", - " qcd_key,\n", " bg_keys,\n", - " samples,\n", - " nonres_sig_keys,\n", " nonres_samples,\n", - " txbb_wps,\n", - " jec_shifts,\n", - " jmsr_shifts,\n", + " nonres_sig_keys,\n", + " samples,\n", ")\n", - "from postprocessing import nonres_shape_vars, Region\n", - "\n", - "import numpy as np\n", - "import pandas as pd\n", - "import pickle\n", "\n", "# from pandas.errors import SettingWithCopyWarning\n", - "from hist import Hist\n", - "\n", - "import os\n", - "from copy 
import deepcopy\n", - "from inspect import cleandoc\n", - "import warnings\n", - "\n", - "import matplotlib.pyplot as plt\n", - "import mplhep as hep\n", - "import matplotlib.ticker as mticker\n", - "import vector\n", + "from utils import CUT_MAX_VAL, ShapeVar\n", "\n", "plt.style.use(hep.style.CMS)\n", "hep.style.use(\"CMS\")\n", @@ -279,14 +263,14 @@ " # Adding variables defined in HIG-20-005 that show strong differentiation for VBF signal events and background\n", "\n", " # seperation between both ak8 higgs jets\n", - " df[f\"vbf_dR_HH\"] = VVJet.deltaR(bbJet)\n", - " df[f\"vbf_dR_j0_HVV\"] = vbf1.deltaR(VVJet)\n", - " df[f\"vbf_dR_j1_HVV\"] = vbf2.deltaR(VVJet)\n", - " df[f\"vbf_dR_j0_Hbb\"] = vbf1.deltaR(bbJet)\n", - " df[f\"vbf_dR_j1_Hbb\"] = vbf2.deltaR(bbJet)\n", - " df[f\"vbf_dR_jj\"] = vbf1.deltaR(vbf2)\n", - " df[f\"vbf_Mass_jj\"] = jj.M\n", - " df[f\"vbf_dEta_jj\"] = np.abs(vbf1.eta - vbf2.eta)\n", + " df[\"vbf_dR_HH\"] = VVJet.deltaR(bbJet)\n", + " df[\"vbf_dR_j0_HVV\"] = vbf1.deltaR(VVJet)\n", + " df[\"vbf_dR_j1_HVV\"] = vbf2.deltaR(VVJet)\n", + " df[\"vbf_dR_j0_Hbb\"] = vbf1.deltaR(bbJet)\n", + " df[\"vbf_dR_j1_Hbb\"] = vbf2.deltaR(bbJet)\n", + " df[\"vbf_dR_jj\"] = vbf1.deltaR(vbf2)\n", + " df[\"vbf_Mass_jj\"] = jj.M\n", + " df[\"vbf_dEta_jj\"] = np.abs(vbf1.eta - vbf2.eta)\n", "\n", " # Subleading VBF-jet cos(θ) in the HH+2j center of mass frame:\n", " # https://github.com/scikit-hep/vector/blob/main/src/vector/_methods.py#L916\n", @@ -296,13 +280,13 @@ " # Leading VBF-jet cos(θ) in the HH+2j center of mass frame:\n", " thetab1 = 2 * np.arctan(np.exp(-j1_CMF.eta))\n", " thetab1 = np.cos(thetab1) # 12\n", - " df[f\"vbf_cos_j1\"] = np.abs(thetab1)\n", + " df[\"vbf_cos_j1\"] = np.abs(thetab1)\n", "\n", " # Subleading VBF-jet cos(θ) in the HH+2j center of mass frame:\n", " j2_CMF = vbf2.boostCM_of_p4(system_4vec)\n", " thetab2 = 2 * np.arctan(np.exp(-j2_CMF.eta))\n", " thetab2 = np.cos(thetab2)\n", - " df[f\"vbf_cos_j2\"] = np.abs(thetab2)\n", + " df[\"vbf_cos_j2\"] = np.abs(thetab2)\n", "\n", " # H1-centrality * H2-centrality:\n", " delta_eta = vbf1.eta - vbf2.eta\n", @@ -311,7 +295,7 @@ " -np.power((VVJet.eta - avg_eta) / delta_eta, 2)\n", " - np.power((bbJet.eta - avg_eta) / delta_eta, 2)\n", " )\n", - " df[f\"vbf_prod_centrality\"] = prod_centrality" + " df[\"vbf_prod_centrality\"] = prod_centrality" ] }, { diff --git a/src/HHbbVV/postprocessing/TTbarCheck.ipynb b/src/HHbbVV/postprocessing/TTbarCheck.ipynb index b7a78abe..5bdc6db8 100644 --- a/src/HHbbVV/postprocessing/TTbarCheck.ipynb +++ b/src/HHbbVV/postprocessing/TTbarCheck.ipynb @@ -6,46 +6,34 @@ "metadata": {}, "outputs": [], "source": [ - "import utils\n", - "import plotting\n", - "import postprocessing\n", - "import corrections\n", + "from __future__ import annotations\n", "\n", - "from utils import CUT_MAX_VAL\n", + "import os\n", + "import warnings\n", + "\n", + "import matplotlib.pyplot as plt\n", + "import matplotlib.ticker as mticker\n", + "import mplhep as hep\n", + "import numpy as np\n", + "import pandas as pd\n", + "import postprocessing\n", + "import utils\n", "from hh_vars import (\n", - " years,\n", - " sig_key,\n", - " data_key,\n", - " qcd_key,\n", - " bg_keys,\n", - " samples,\n", - " txbb_wps,\n", " jec_shifts,\n", " jmsr_shifts,\n", + " samples,\n", + " sig_key,\n", ")\n", + "from pandas.errors import SettingWithCopyWarning\n", "from postprocessing import (\n", - " shape_var,\n", - " shape_bins,\n", " blind_window,\n", " selection_regions,\n", " selection_regions_label,\n", " # 
selection_regions_year,\n", + " shape_bins,\n", + " shape_var,\n", ")\n", - "\n", - "import numpy as np\n", - "import pandas as pd\n", - "import pickle\n", - "from pandas.errors import SettingWithCopyWarning\n", - "from hist import Hist\n", - "\n", - "import os\n", - "from copy import deepcopy\n", - "from inspect import cleandoc\n", - "import warnings\n", - "\n", - "import matplotlib.pyplot as plt\n", - "import mplhep as hep\n", - "import matplotlib.ticker as mticker\n", + "from utils import CUT_MAX_VAL\n", "\n", "plt.style.use(hep.style.CMS)\n", "hep.style.use(\"CMS\")\n", diff --git a/src/HHbbVV/postprocessing/TopAnalysis.ipynb b/src/HHbbVV/postprocessing/TopAnalysis.ipynb index 18c2c9a9..da71569e 100644 --- a/src/HHbbVV/postprocessing/TopAnalysis.ipynb +++ b/src/HHbbVV/postprocessing/TopAnalysis.ipynb @@ -6,26 +6,27 @@ "metadata": {}, "outputs": [], "source": [ - "import utils\n", - "import plotting\n", - "import numpy as np\n", + "from __future__ import annotations\n", + "\n", "import warnings\n", - "import pandas as pd\n", "\n", - "from pandas.errors import SettingWithCopyWarning\n", - "from hh_vars import data_key\n", + "import numpy as np\n", + "import pandas as pd\n", + "import plotting\n", "import postprocessing\n", + "import utils\n", + "from hh_vars import data_key\n", + "from pandas.errors import SettingWithCopyWarning\n", "\n", "# ignore these because they don't seem to apply\n", "warnings.simplefilter(action=\"ignore\", category=SettingWithCopyWarning)\n", "\n", - "from PyPDF2 import PdfMerger\n", - "\n", "from copy import deepcopy\n", "\n", "import matplotlib.pyplot as plt\n", - "import mplhep as hep\n", "import matplotlib.ticker as mticker\n", + "import mplhep as hep\n", + "from PyPDF2 import PdfMerger\n", "\n", "plt.style.use(hep.style.CMS)\n", "hep.style.use(\"CMS\")\n", diff --git a/src/HHbbVV/postprocessing/TopAnalysisOld.ipynb b/src/HHbbVV/postprocessing/TopAnalysisOld.ipynb index 7169e2e4..1021eaa3 100644 --- a/src/HHbbVV/postprocessing/TopAnalysisOld.ipynb +++ b/src/HHbbVV/postprocessing/TopAnalysisOld.ipynb @@ -6,26 +6,27 @@ "metadata": {}, "outputs": [], "source": [ - "import utils\n", - "import plotting\n", - "import numpy as np\n", + "from __future__ import annotations\n", + "\n", "import warnings\n", - "import pandas as pd\n", "\n", - "from pandas.errors import SettingWithCopyWarning\n", - "from hh_vars import data_key\n", + "import numpy as np\n", + "import pandas as pd\n", + "import plotting\n", "import postprocessing\n", + "import utils\n", + "from hh_vars import data_key\n", + "from pandas.errors import SettingWithCopyWarning\n", "\n", "# ignore these because they don't seem to apply\n", "warnings.simplefilter(action=\"ignore\", category=SettingWithCopyWarning)\n", "\n", - "from PyPDF2 import PdfMerger\n", - "\n", "from copy import deepcopy\n", "\n", "import matplotlib.pyplot as plt\n", - "import mplhep as hep\n", "import matplotlib.ticker as mticker\n", + "import mplhep as hep\n", + "from PyPDF2 import PdfMerger\n", "\n", "plt.style.use(hep.style.CMS)\n", "hep.style.use(\"CMS\")\n", diff --git a/src/HHbbVV/postprocessing/TriggerSFs.ipynb b/src/HHbbVV/postprocessing/TriggerSFs.ipynb index d64123a1..384ddf47 100644 --- a/src/HHbbVV/postprocessing/TriggerSFs.ipynb +++ b/src/HHbbVV/postprocessing/TriggerSFs.ipynb @@ -6,29 +6,26 @@ "metadata": {}, "outputs": [], "source": [ - "import utils\n", - "import plotting\n", - "import postprocessing\n", - "import numpy as np\n", + "from __future__ import annotations\n", + "\n", "import warnings\n", + "\n", + "import 
numpy as np\n", "import pandas as pd\n", + "import utils\n", + "from hh_vars import data_key, samples, sig_key\n", "from pandas.errors import SettingWithCopyWarning\n", - "from hh_vars import samples, sig_key, qcd_key, data_key\n", - "import os\n", "from utils import CUT_MAX_VAL\n", "\n", "# ignore these because they don't seem to apply\n", "warnings.simplefilter(action=\"ignore\", category=SettingWithCopyWarning)\n", "\n", - "from PyPDF2 import PdfFileMerger\n", "\n", "import pickle\n", "\n", - "from copy import deepcopy\n", - "\n", "import matplotlib.pyplot as plt\n", - "import mplhep as hep\n", "import matplotlib.ticker as mticker\n", + "import mplhep as hep\n", "\n", "plt.style.use(hep.style.CMS)\n", "hep.style.use(\"CMS\")\n", @@ -214,8 +211,8 @@ "metadata": {}, "outputs": [], "source": [ - "from hist.intervals import clopper_pearson_interval\n", "from coffea.lookup_tools.dense_lookup import dense_lookup\n", + "from hist.intervals import clopper_pearson_interval\n", "\n", "trig_errors = {}\n", "trig_intervals = {}\n", diff --git a/src/HHbbVV/postprocessing/VV_analysis.ipynb b/src/HHbbVV/postprocessing/VV_analysis.ipynb index e6436e4e..621efaf3 100644 --- a/src/HHbbVV/postprocessing/VV_analysis.ipynb +++ b/src/HHbbVV/postprocessing/VV_analysis.ipynb @@ -14,25 +14,25 @@ "metadata": {}, "outputs": [], "source": [ - "import utils\n", - "import plotting\n", - "import postprocessing\n", - "import numpy as np\n", + "from __future__ import annotations\n", + "\n", "import warnings\n", + "\n", + "import numpy as np\n", "import pandas as pd\n", + "import postprocessing\n", + "import utils\n", + "from hh_vars import nonres_samples, nonres_sig_keys\n", "from pandas.errors import SettingWithCopyWarning\n", - "from hh_vars import nonres_samples, samples, nonres_sig_keys, data_key\n", "\n", "# ignore these because they don't seem to apply\n", "warnings.simplefilter(action=\"ignore\", category=SettingWithCopyWarning)\n", "\n", - "from PyPDF2 import PdfFileMerger\n", "\n", - "from copy import deepcopy\n", "\n", "import matplotlib.pyplot as plt\n", - "import mplhep as hep\n", "import matplotlib.ticker as mticker\n", + "import mplhep as hep\n", "\n", "plt.style.use(hep.style.CMS)\n", "hep.style.use(\"CMS\")\n", diff --git a/src/HHbbVV/postprocessing/check_outputs.ipynb b/src/HHbbVV/postprocessing/check_outputs.ipynb index 08796ac3..481a1034 100644 --- a/src/HHbbVV/postprocessing/check_outputs.ipynb +++ b/src/HHbbVV/postprocessing/check_outputs.ipynb @@ -6,9 +6,12 @@ "metadata": {}, "outputs": [], "source": [ + "from __future__ import annotations\n", + "\n", + "import pickle\n", + "\n", "import numpy as np\n", - "import pandas as pd\n", - "import pickle" + "import pandas as pd" ] }, { diff --git a/src/HHbbVV/processors/GenSelection.ipynb b/src/HHbbVV/processors/GenSelection.ipynb index e1b8e254..cc12b51b 100644 --- a/src/HHbbVV/processors/GenSelection.ipynb +++ b/src/HHbbVV/processors/GenSelection.ipynb @@ -6,33 +6,22 @@ "metadata": {}, "outputs": [], "source": [ - "import uproot\n", + "from __future__ import annotations\n", + "\n", "import awkward as ak\n", "from coffea import nanoevents\n", - "from coffea.nanoevents.methods.base import NanoEventsArray\n", - "from coffea.analysis_tools import Weights, PackedSelection\n", - "from coffea.nanoevents.methods import nanoaod\n", + "from coffea.analysis_tools import PackedSelection\n", "from coffea.nanoevents.methods import vector\n", - "from coffea.lookup_tools.dense_lookup import dense_lookup\n", "\n", "ak.behavior.update(vector.behavior)\n", "\n", - "import 
pickle, json, gzip\n", - "import numpy as np\n", - "\n", - "from typing import Optional, List, Dict\n", - "from copy import copy\n", + "import os\n", + "import pickle\n", "\n", "import matplotlib.pyplot as plt\n", - "import mplhep as hep\n", - "from matplotlib import colors\n", - "\n", - "from tqdm import tqdm\n", - "\n", - "import os\n", + "import numpy as np\n", "\n", "# import corrections\n", - "import correctionlib\n", "\n", "# import utils" ] diff --git a/src/HHbbVV/processors/SemiResolvedVetoGenStudy.ipynb b/src/HHbbVV/processors/SemiResolvedVetoGenStudy.ipynb index 8f12ca08..0661391c 100644 --- a/src/HHbbVV/processors/SemiResolvedVetoGenStudy.ipynb +++ b/src/HHbbVV/processors/SemiResolvedVetoGenStudy.ipynb @@ -13,27 +13,16 @@ "metadata": {}, "outputs": [], "source": [ - "import uproot\n", + "from __future__ import annotations\n", + "\n", "import awkward as ak\n", "from coffea import nanoevents\n", - "from coffea.nanoevents.methods.base import NanoEventsArray\n", - "from coffea.analysis_tools import Weights, PackedSelection\n", - "from coffea.nanoevents.methods import nanoaod\n", "from coffea.nanoevents.methods import vector\n", - "from coffea.lookup_tools.dense_lookup import dense_lookup\n", "\n", "ak.behavior.update(vector.behavior)\n", "\n", - "import pickle, json, gzip\n", - "import numpy as np\n", - "\n", - "from typing import Optional, List, Dict\n", - "from copy import copy\n", - "\n", "import matplotlib.pyplot as plt\n", - "import mplhep as hep\n", - "from matplotlib import colors\n", - "\n", + "import numpy as np\n", "from utils import pad_val" ] }, diff --git a/src/HHbbVV/processors/debug_scores_rk.ipynb b/src/HHbbVV/processors/debug_scores_rk.ipynb index 12beaa30..a5d7a2e1 100644 --- a/src/HHbbVV/processors/debug_scores_rk.ipynb +++ b/src/HHbbVV/processors/debug_scores_rk.ipynb @@ -7,11 +7,13 @@ "metadata": {}, "outputs": [], "source": [ - "import uproot\n", + "from __future__ import annotations\n", + "\n", "import awkward as ak\n", + "import matplotlib.pyplot as plt\n", "import numpy as np\n", - "from coffea import nanoevents\n", - "import matplotlib.pyplot as plt" + "import uproot\n", + "from coffea import nanoevents" ] }, { diff --git a/src/HHbbVV/resonant_studies/QuarkFractions.ipynb b/src/HHbbVV/resonant_studies/QuarkFractions.ipynb index 19025b51..953b65fc 100644 --- a/src/HHbbVV/resonant_studies/QuarkFractions.ipynb +++ b/src/HHbbVV/resonant_studies/QuarkFractions.ipynb @@ -6,15 +6,15 @@ "metadata": {}, "outputs": [], "source": [ - "import pickle\n", - "import os\n", + "from __future__ import annotations\n", "\n", - "import numpy as np\n", + "import os\n", + "import pickle\n", "\n", - "import matplotlib\n", "import matplotlib.pyplot as plt\n", - "import mplhep as hep\n", "import matplotlib.ticker as mticker\n", + "import mplhep as hep\n", + "import numpy as np\n", "\n", "plt.style.use(hep.style.CMS)\n", "hep.style.use(\"CMS\")\n", diff --git a/src/HHbbVV/scale_factors/VV_reweighting.ipynb b/src/HHbbVV/scale_factors/VV_reweighting.ipynb index 0056984e..f6e28c71 100644 --- a/src/HHbbVV/scale_factors/VV_reweighting.ipynb +++ b/src/HHbbVV/scale_factors/VV_reweighting.ipynb @@ -14,41 +14,28 @@ } ], "source": [ - "import uproot\n", + "from __future__ import annotations\n", + "\n", "import awkward as ak\n", + "import uproot\n", "from coffea import nanoevents\n", - "from coffea.nanoevents.methods.base import NanoEventsArray\n", - "from coffea.analysis_tools import Weights, PackedSelection\n", - "from coffea.nanoevents.methods import nanoaod\n", - "from 
coffea.nanoevents.methods import vector\n", + "from coffea.analysis_tools import PackedSelection\n", "from coffea.lookup_tools.dense_lookup import dense_lookup\n", - "from coffea.nanoevents.methods.nanoaod import MuonArray, JetArray, FatJetArray, GenParticleArray\n", + "from coffea.nanoevents.methods import vector\n", + "from coffea.nanoevents.methods.base import NanoEventsArray\n", + "from coffea.nanoevents.methods.nanoaod import (\n", + " GenParticleArray,\n", + ")\n", "\n", "ak.behavior.update(vector.behavior)\n", "\n", - "import pickle, json, gzip\n", - "import numpy as np\n", - "\n", - "from typing import Optional, List, Dict, Tuple\n", - "from copy import copy\n", - "\n", - "import matplotlib.pyplot as plt\n", - "import mplhep as hep\n", - "from matplotlib import colors\n", - "\n", - "from tqdm import tqdm\n", - "\n", "# import fastjet\n", - "\n", - "import pathlib\n", - "\n", "# import jetnet\n", - "\n", "import os\n", + "from typing import Dict, List\n", "\n", - "import corrections\n", - "import correctionlib\n", - "\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", "from utils import P4" ] }, @@ -855,7 +842,6 @@ "\n", "\n", "def _get_lund_lookups(seed: int = 42, lnN: bool = True, trunc_gauss: bool = False):\n", - " import uproot\n", "\n", " # initialize lund plane scale factors lookups\n", " f = uproot.open(package_path + \"/corrections/lp_ratio_jan20.root\")\n", diff --git a/src/HHbbVV/scale_factors/check_cuts.ipynb b/src/HHbbVV/scale_factors/check_cuts.ipynb index 78ad6ec4..09d23004 100644 --- a/src/HHbbVV/scale_factors/check_cuts.ipynb +++ b/src/HHbbVV/scale_factors/check_cuts.ipynb @@ -6,10 +6,12 @@ "metadata": {}, "outputs": [], "source": [ + "from __future__ import annotations\n", + "\n", + "import pickle\n", + "\n", "import h5py\n", - "import pandas as pd\n", - "import numpy as np\n", - "import pickle" + "import pandas as pd" ] }, { diff --git a/src/HHbbVV/scale_factors/gen_clustering.ipynb b/src/HHbbVV/scale_factors/gen_clustering.ipynb index 114c8f21..7ea77791 100644 --- a/src/HHbbVV/scale_factors/gen_clustering.ipynb +++ b/src/HHbbVV/scale_factors/gen_clustering.ipynb @@ -6,27 +6,21 @@ "metadata": {}, "outputs": [], "source": [ - "import uproot\n", - "import awkward as ak\n", - "from coffea import nanoevents\n", - "from coffea.nanoevents.methods.base import NanoEventsArray\n", - "\n", - "import pickle\n", - "import numpy as np\n", + "from __future__ import annotations\n", "\n", - "from typing import Optional, List, Dict\n", + "import os\n", "from copy import copy\n", "\n", - "import matplotlib.pyplot as plt\n", - "import mplhep as hep\n", - "from matplotlib import colors\n", - "\n", - "from tqdm import tqdm\n", + "import awkward as ak\n", "import fastjet\n", "import jetnet\n", + "import matplotlib.pyplot as plt\n", + "import mplhep as hep\n", + "import numpy as np\n", "import vector\n", - "\n", - "import os" + "from coffea import nanoevents\n", + "from matplotlib import colors\n", + "from tqdm import tqdm" ] }, { diff --git a/src/HHbbVV/scale_factors/top_reweighting.ipynb b/src/HHbbVV/scale_factors/top_reweighting.ipynb index fcff82dd..2cd4f0d6 100644 --- a/src/HHbbVV/scale_factors/top_reweighting.ipynb +++ b/src/HHbbVV/scale_factors/top_reweighting.ipynb @@ -6,39 +6,29 @@ "metadata": {}, "outputs": [], "source": [ - "import uproot\n", + "from __future__ import annotations\n", + "\n", "import awkward as ak\n", + "import uproot\n", "from coffea import nanoevents\n", - "from coffea.nanoevents.methods.base import NanoEventsArray\n", - "from 
coffea.analysis_tools import Weights, PackedSelection\n", - "from coffea.nanoevents.methods import nanoaod\n", - "from coffea.nanoevents.methods import vector\n", + "from coffea.analysis_tools import PackedSelection, Weights\n", "from coffea.lookup_tools.dense_lookup import dense_lookup\n", + "from coffea.nanoevents.methods import nanoaod, vector\n", "\n", "ak.behavior.update(vector.behavior)\n", "\n", - "import pickle, json, gzip\n", - "import numpy as np\n", - "\n", - "from typing import Optional, List, Dict\n", - "from copy import copy\n", - "\n", - "import matplotlib.pyplot as plt\n", - "import mplhep as hep\n", - "from matplotlib import colors\n", - "\n", - "from tqdm import tqdm\n", - "\n", - "import fastjet\n", + "import gzip\n", + "import json\n", "\n", "# import jetnet\n", - "\n", "import os\n", "\n", - "from HHbbVV.processors import corrections\n", - "import correctionlib\n", + "import fastjet\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "import utils\n", "\n", - "import utils" + "from HHbbVV.processors import corrections" ] }, { @@ -459,9 +449,9 @@ " np.random.seed(42)\n", " smearing = np.random.normal(size=mass.shape)\n", " # scale to JMR nom, down, up (minimum at 0)\n", - " jmr_nom, jmr_down, jmr_up = [\n", + " jmr_nom, jmr_down, jmr_up = (\n", " (smearing * max(jmrValues[mkey][year][i] - 1, 0) + 1) for i in range(3)\n", - " ]\n", + " )\n", " jms_nom, jms_down, jms_up = jmsValues[mkey][year]\n", "\n", " mass_jms = mass * jms_nom\n", @@ -616,7 +606,6 @@ "metadata": {}, "outputs": [], "source": [ - "import json, gzip\n", "\n", "with gzip.open(corrections.get_pog_json(\"jec\", \"2017\"), \"r\") as fin:\n", " jec = json.loads(fin.read().decode(\"utf-8\"))\n", diff --git a/src/HHbbVV/tagger/high_mass_check.ipynb b/src/HHbbVV/tagger/high_mass_check.ipynb index d5dcb467..31e25b62 100644 --- a/src/HHbbVV/tagger/high_mass_check.ipynb +++ b/src/HHbbVV/tagger/high_mass_check.ipynb @@ -6,12 +6,12 @@ "metadata": {}, "outputs": [], "source": [ - "import uproot\n", - "import awkward as ak\n", - "from coffea import nanoevents\n", - "import numpy as np\n", + "from __future__ import annotations\n", + "\n", "import matplotlib.pyplot as plt\n", "import mplhep as hep\n", + "import numpy as np\n", + "from coffea import nanoevents\n", "\n", "plt.style.use(hep.style.CMS)\n", "hep.style.use(\"CMS\")\n", diff --git a/src/HHbbVV/triton/export_and_check.ipynb b/src/HHbbVV/triton/export_and_check.ipynb index c0f9ed16..34df53d7 100644 --- a/src/HHbbVV/triton/export_and_check.ipynb +++ b/src/HHbbVV/triton/export_and_check.ipynb @@ -6,14 +6,13 @@ "metadata": {}, "outputs": [], "source": [ - "from typing import Optional, List, Dict\n", + "from __future__ import annotations\n", "\n", - "import numpy as np\n", - "import scipy\n", + "from typing import Dict\n", "\n", + "import numpy as np\n", "import tritonclient.grpc as triton_grpc\n", "import tritonclient.http as triton_http\n", - "\n", "from tqdm import tqdm" ] }, @@ -150,7 +149,6 @@ "import onnx\n", "import onnxruntime as ort\n", "\n", - "\n", "model_dir = (\n", " \"models/model_2023May30/ak8_MD_inclv8_part_2reg_manual.useamp.lite.gm5.ddp-bs768-lr6p75e-3/\"\n", ")\n", @@ -178,7 +176,6 @@ "import onnx\n", "import onnxruntime as ort\n", "\n", - "\n", "model_dir = (\n", " \"models/model_2023May30/ak8_MD_inclv8_part_2reg_manual.useamp.lite.gm5.ddp-bs768-lr6p75e-3/\"\n", ")\n", @@ -276,7 +273,7 @@ " \"output_names\": [\"softmax\"],\n", " \"dynamic_axes\": {\n", " **{k: {0: \"N\", 2: \"n_\" + k.split(\"_\")[0]} for k in 
data_config[\"input_names\"]},\n", - " **{\"softmax\": {0: \"N\"}},\n", + " \"softmax\": {0: \"N\"},\n", " },\n", "}\n", "\n", @@ -290,7 +287,7 @@ " model_dir + \"ak8_MD_inclv8_part_2reg_manual.useamp.lite.gm5.ddp-bs768-lr6p75e-3/model.onnx\",\n", " input_names=model_info[\"input_names\"],\n", " output_names=model_info[\"output_names\"],\n", - " dynamic_axes=model_info.get(\"dynamic_axes\", None),\n", + " dynamic_axes=model_info.get(\"dynamic_axes\"),\n", " opset_version=11,\n", ")" ] @@ -392,7 +389,7 @@ " \"output_names\": [\"softmax\"],\n", " \"dynamic_axes\": {\n", " **{k: {0: \"N\", 2: \"n_\" + k.split(\"_\")[0]} for k in data_config[\"input_names\"]},\n", - " **{\"softmax\": {0: \"N\"}},\n", + " \"softmax\": {0: \"N\"},\n", " },\n", "}\n", "\n", diff --git a/src/binder/VBFPlots.ipynb b/src/binder/VBFPlots.ipynb index 97903808..cc9002ca 100644 --- a/src/binder/VBFPlots.ipynb +++ b/src/binder/VBFPlots.ipynb @@ -6,31 +6,21 @@ "metadata": {}, "outputs": [], "source": [ - "import uproot\n", + "from __future__ import annotations\n", + "\n", "import awkward as ak\n", "from coffea import nanoevents\n", - "from coffea.nanoevents.methods.base import NanoEventsArray\n", - "from coffea.analysis_tools import Weights, PackedSelection\n", - "from coffea.nanoevents.methods import nanoaod\n", "from coffea.nanoevents.methods import vector\n", "\n", "ak.behavior.update(vector.behavior)\n", "\n", - "import pickle, json, gzip\n", - "import numpy as np\n", - "\n", - "from typing import Optional, List, Dict\n", - "from copy import copy\n", + "from pathlib import Path\n", "\n", "import matplotlib.pyplot as plt\n", "import mplhep as hep\n", + "import numpy as np\n", "from matplotlib import colors\n", "\n", - "from tqdm import tqdm\n", - "\n", - "from pathlib import Path\n", - "import os\n", - "\n", "plt.rcParams.update({\"font.size\": 16})\n", "plt.style.use(hep.style.CMS)" ] @@ -143,7 +133,7 @@ "outputs": [], "source": [ "def get_interpolation(mhh):\n", - " from scipy.interpolate import interp1d, CubicSpline, UnivariateSpline\n", + " from scipy.interpolate import UnivariateSpline\n", "\n", " counts, bins = np.histogram(mhh, bins=np.logspace(np.log10(280), np.log10(1500), 20))\n", "\n", @@ -170,7 +160,7 @@ "outputs": [], "source": [ "def plot_interpolation(ax, mhh, weights, label):\n", - " from scipy.interpolate import interp1d, CubicSpline, UnivariateSpline\n", + " from scipy.interpolate import UnivariateSpline\n", "\n", " counts, bins = np.histogram(\n", " mhh, bins=np.logspace(np.log10(280), np.log10(1500), 31), weights=weights\n", @@ -246,7 +236,7 @@ ")\n", "\n", "# the vector of symbolic sample cross sections\n", - "s = sympy.Matrix([[sympy.Symbol(\"xs{}\".format(i))] for i in range(len(csamples))])\n", + "s = sympy.Matrix([[sympy.Symbol(f\"xs{i}\")] for i in range(len(csamples))])\n", "\n", "# actual computation, i.e., matrix inversion and multiplications with vectors\n", "M_inv = M.pinv()\n", @@ -339,7 +329,9 @@ " ((1, 2, 1), r\"$\\kappa_{2V}=2$\"),\n", " ]\n", "):\n", - " from scipy.interpolate import interp1d, CubicSpline, UnivariateSpline, splrep, BSpline, Rbf\n", + " from scipy.interpolate import (\n", + " Rbf,\n", + " )\n", "\n", " bin_sizes = np.diff(bins)\n", " interp_counts = get_hist_interp(*sample) / bin_sizes\n", diff --git a/src/binder/calcTriggerSFUncertainties-Jul21.ipynb b/src/binder/calcTriggerSFUncertainties-Jul21.ipynb index a415fbe4..15f0ac72 100644 --- a/src/binder/calcTriggerSFUncertainties-Jul21.ipynb +++ b/src/binder/calcTriggerSFUncertainties-Jul21.ipynb @@ -6,7 +6,10 @@ 
"metadata": {}, "outputs": [], "source": [ + "from __future__ import annotations\n", + "\n", "import pickle\n", + "\n", "from hist.intervals import clopper_pearson_interval" ] }, @@ -631,7 +634,6 @@ ") as filehandler:\n", " trigEffsDict[\"effserr\"] = pickle.load(filehandler)\n", "\n", - "import hist\n", "import numpy as np\n", "from coffea.lookup_tools.dense_lookup import dense_lookup\n", "\n", diff --git a/src/binder/calcTriggerSFUncertainties.ipynb b/src/binder/calcTriggerSFUncertainties.ipynb index 15d29fa8..d2c86f55 100644 --- a/src/binder/calcTriggerSFUncertainties.ipynb +++ b/src/binder/calcTriggerSFUncertainties.ipynb @@ -6,7 +6,10 @@ "metadata": {}, "outputs": [], "source": [ + "from __future__ import annotations\n", + "\n", "import pickle\n", + "\n", "from hist.intervals import clopper_pearson_interval" ] }, diff --git a/src/binder/plotJetHTTriggerEfficiencies_1D_Jul15.ipynb b/src/binder/plotJetHTTriggerEfficiencies_1D_Jul15.ipynb index 9fafd87d..6aea2da8 100644 --- a/src/binder/plotJetHTTriggerEfficiencies_1D_Jul15.ipynb +++ b/src/binder/plotJetHTTriggerEfficiencies_1D_Jul15.ipynb @@ -6,12 +6,13 @@ "metadata": {}, "outputs": [], "source": [ + "from __future__ import annotations\n", + "\n", "import pickle\n", + "\n", "import matplotlib.pyplot as plt\n", - "import hist\n", "import mplhep as hep\n", - "import numpy as np\n", - "from coffea import hist as hist2" + "import numpy as np" ] }, { diff --git a/src/binder/plotTF.ipynb b/src/binder/plotTF.ipynb index 127571a6..725979eb 100644 --- a/src/binder/plotTF.ipynb +++ b/src/binder/plotTF.ipynb @@ -6,8 +6,10 @@ "metadata": {}, "outputs": [], "source": [ - "import numpy as np\n", - "import matplotlib.pyplot as plt" + "from __future__ import annotations\n", + "\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np" ] }, { diff --git a/src/binder/plotTrigEffs.ipynb b/src/binder/plotTrigEffs.ipynb index 24eea4cb..3b89b6f0 100644 --- a/src/binder/plotTrigEffs.ipynb +++ b/src/binder/plotTrigEffs.ipynb @@ -7,13 +7,13 @@ "metadata": {}, "outputs": [], "source": [ - "import awkward as ak\n", - "import numpy as np\n", - "from hist import Hist\n", + "from __future__ import annotations\n", + "\n", + "import math\n", + "import pickle\n", + "\n", "import matplotlib.pyplot as plt\n", "import mplhep as hep\n", - "import pickle\n", - "import math\n", "\n", "plt.rcParams.update({\"font.size\": 16})\n", "plt.style.use(hep.style.CMS)" diff --git a/src/runCoffeaCasa.ipynb b/src/runCoffeaCasa.ipynb index 3636a7b0..83608786 100644 --- a/src/runCoffeaCasa.ipynb +++ b/src/runCoffeaCasa.ipynb @@ -20786,6 +20786,8 @@ } ], "source": [ + "from __future__ import annotations\n", + "\n", "from dask.distributed import Client\n", "\n", "client = Client(\"tls://localhost:8786\")\n", @@ -20856,7 +20858,7 @@ "from os import listdir\n", "\n", "# TODO: replace with UL sample once we have it\n", - "with open(\"data/2017_preUL_nano/HHToBBVVToBBQQQQ_cHHH1.txt\", \"r\") as file:\n", + "with open(\"data/2017_preUL_nano/HHToBBVVToBBQQQQ_cHHH1.txt\") as file:\n", " filelist = [\n", " f[:-1].replace(\"/eos/uscms/\", \"root://xcache//\") for f in file.readlines()\n", " ] # need to use xcache redirector at Nebraksa coffea-casa\n", @@ -20872,7 +20874,7 @@ "\n", "for sample in listdir(\"data/2017_UL_nano/\"):\n", " if sample[-4:] == \".txt\" and sample[:-4] not in ignore_samples:\n", - " with open(f\"data/2017_UL_nano/{sample}\", \"r\") as file:\n", + " with open(f\"data/2017_UL_nano/{sample}\") as file:\n", " replace_string = \"/hadoop/cms/\" if \"JetHT\" in sample else 
\"/eos/uscms/\"\n", " filelist = [\n", " f[:-1].replace(replace_string, \"root://xcache//\") for f in file.readlines()\n", @@ -20887,11 +20889,11 @@ "metadata": {}, "outputs": [], "source": [ - "data_keys = [key for key in fileset.keys() if \"JetHT\" in key]\n", - "qcd_keys = [key for key in fileset.keys() if \"QCD\" in key]\n", - "tt_keys = [key for key in fileset.keys() if \"TT\" in key or \"ST\" in key]\n", - "V_keys = [key for key in fileset.keys() if key[5] == \"W\" or key[5] == \"Z\"]\n", - "hhbbVV4q_keys = [key for key in fileset.keys() if \"HHToBBVVToBBQQQQ\" in key]" + "data_keys = [key for key in fileset if \"JetHT\" in key]\n", + "qcd_keys = [key for key in fileset if \"QCD\" in key]\n", + "tt_keys = [key for key in fileset if \"TT\" in key or \"ST\" in key]\n", + "V_keys = [key for key in fileset if key[5] == \"W\" or key[5] == \"Z\"]\n", + "hhbbVV4q_keys = [key for key in fileset if \"HHToBBVVToBBQQQQ\" in key]" ] }, { @@ -20944,7 +20946,7 @@ "metadata": {}, "outputs": [], "source": [ - "for key in fileset.keys():\n", + "for key in fileset:\n", " if \"JetHT\" not in key:\n", " dname = key.split(\"2017_\")[1]\n", " if dname not in xsecs:\n", @@ -20961,11 +20963,12 @@ "metadata": {}, "outputs": [], "source": [ + "import pickle\n", + "\n", "import awkward as ak\n", - "from coffea.processor import ProcessorABC, column_accumulator\n", "import numpy as np\n", "from coffea.analysis_tools import PackedSelection\n", - "import pickle\n", + "from coffea.processor import ProcessorABC, column_accumulator\n", "\n", "\n", "class bbVVSkimmer(ProcessorABC):\n", @@ -21243,9 +21246,10 @@ } ], "source": [ + "import time\n", + "\n", "from coffea import processor\n", "from coffea.nanoevents import NanoAODSchema\n", - "import time\n", "\n", "# import processors\n", "\n", @@ -21322,7 +21326,7 @@ } ], "source": [ - "from coffea.nanoevents import NanoEventsFactory, NanoAODSchema\n", + "from coffea.nanoevents import NanoAODSchema, NanoEventsFactory\n", "\n", "events = NanoEventsFactory.from_root(\n", " \"root://xcache//store/user/lpcdihiggsboost/cmantill/PFNano/2017_UL_ak15/QCD_HT700to1000_TuneCP5_PSWeights_13TeV-madgraphMLM-pythia8/RunIISummer19UL17Jun23-106X_mc2017_realistic_v6-v2/210623_225007/0000/nano_mc2017_1-10.root\",\n", @@ -21395,7 +21399,6 @@ }, "outputs": [], "source": [ - "import pickle\n", "from os.path import exists\n", "\n", "out_file = \"outPickles/out_skimmed.pickle\" # make sure to change!!\n", diff --git a/src/runJetHTTriggerEfficiencies.ipynb b/src/runJetHTTriggerEfficiencies.ipynb index 300fa086..7297e1bf 100644 --- a/src/runJetHTTriggerEfficiencies.ipynb +++ b/src/runJetHTTriggerEfficiencies.ipynb @@ -9,17 +9,19 @@ }, "outputs": [], "source": [ + "from __future__ import annotations\n", + "\n", + "import json\n", + "import pickle\n", + "\n", "import awkward as ak\n", - "import uproot3\n", - "import uproot\n", - "import numpy as np\n", - "from coffea import processor\n", - "from coffea.nanoevents import NanoEventsFactory, BaseSchema, NanoAODSchema\n", - "from hist import Hist\n", "import matplotlib.pyplot as plt\n", "import mplhep as hep\n", - "import pickle\n", - "import json" + "import numpy as np\n", + "import uproot3\n", + "from coffea import processor\n", + "from coffea.nanoevents import NanoAODSchema\n", + "from hist import Hist" ] }, { @@ -226,7 +228,7 @@ " else:\n", " index_file = f\"../data/pfnanoindex_{year}.json\"\n", "\n", - " with open(index_file, \"r\") as f:\n", + " with open(index_file) as f:\n", " full_fileset = json.load(f)\n", "\n", " fileset = {}\n",