module load apptainer
apptainer pull docker://tvami/madpydel
apptainer shell madpydel_latest.sif
/madgraph/MG5_aMC_v3_5_4/bin/mg5_aMC
display particles
import model /home/vamitamas/GS3Seminar/iDM
generate p p > ap
output GS3_Map200
launch
done
set epsilon 0.01
set map 200
set use_syst = False
done
b) dark photon decayed into bbar
import model /home/vamitamas/GS3Seminar/iDM
generate p p > ap, ap > b b~
output GS3_Map200_0j_decayed_hadronized
launch
shower = Pythia
detector = Delphes
done
set epsilon 0.01
set map 200
set use_syst = False
done
c) dark photon (with ISR) decayed into bbar
import model /home/vamitamas/GS3Seminar/iDM
generate p p > ap, ap > b b~
add process p p > ap j, ap > b b~
output GS3_Map200_1j_decayed_hadronized
launch
shower = Pythia
detector = Delphes
done
set epsilon 0.01
set map 200
set use_syst = False
done
ssh -XY tvami@lxplus.cern.ch
If you are tired of having to type your password all the time, use kinit
kinit tvami@CERN.CH
openssl pkcs12 -in myCert.p12 -clcerts -nokeys -out $HOME/.globus/usercert.pem
openssl pkcs12 -in myCert.p12 -nocerts -out $HOME/.globus/userkey.pem
chmod 600 $HOME/.globus/userkey.pem
chmod 600 $HOME/.globus/usercert.pem
Delete the myCert.p12 file created above to avoid security issues.
cmsrel CMSSW_10_6_30
cd CMSSW_10_6_30/src
cmsenv
/cvmfs/cms-ib.cern.ch/nweek-02674/share/lcg/SCRAMV1/V2_2_9_pre12-a20cf2a1acdf91532be7ceae5386205f/bin/scram build -f compile COMPILER=llvm-analyzer SCRAM_IGNORE_MISSING_COMPILERS=yes
scram build code-format
scram build code-checks
scramv1 b runtests
scram build -f compile COMPILER=llvm-analyzer
runTheMatrix.py -n | grep 2017 | grep SingleMuPt10
you will see that the Workflow number is 10007. Therefore, if you then type
runTheMatrix.py -l 10007 -ne
--> it gives you the cmsdriver commands for this particular workflow, that are actually used in Pull request test and in relvals.
--> you can notice typing the command above that the era actually used is --era Run2_2017 . Inserting the name of this era in the github search leads you to see the config listing the various detector eras to build a 2017 CMS era :
please look here :
https://github.com/cms-sw/cmssw/blob/master/Configuration/Eras/python/Era_Run2_2017_cff.py
where you can see that for the pixel it is phase1Pixel. runTheMatrix.py -l 10824.5,10824.52 -j 0
dqm-access -w -c -f '/PixelPhase1/Phase1_MechanicalView/PXBarrel/digi_occupancy_*_PXLayer_*' -e "match('/ZeroBias1/Run2017G-PromptReco-v.*/DQMIO',dataset)" -s https://cmsweb.cern.ch/dqm/offline/data/json
dqm-access --help | less
The last command can be generalized, e.g.:
If you want to fetch histograms from all periods, use Run2017.* instead of Run2017G (this is a plain regular expression, you can assemble the one you like)
If you are interested in the forward too, I'd suggest making a second query (if you repeat the same command with a different histogram target, the output file in your local directory will be overwritten...)
if you are interested only in some specific run, you could use: -e "run == XYZ and match...."
mkedanlzr
this creates the following files (when calling the analyzer MyAnalyzer)
MyAnalyzer/
| plugins/
| |-- BuildFile.xml
| |-- MyAnalyzer.cc
| python/
| test/
| |-- test_catch2_MyAnalyzer.cc
| |-- test_catch2_main.cc
| |-- BuildFile.xml
Edit MyAnalyzer.cc to do the analysis.
.ls
The following command in ROOT can be used to create the skeletons code
MuonPhiTree->MakeClass("MuonPhiTreeAnalyzer")
which then creates MuonPhiTreeAnalyzer.h and MuonPhiTreeAnalyzer.C
MuonPhiTree->MakeClass("MuonPhiTreeAnalyzer")
/cvmfs/cms-ib.cern.ch/nweek-02674/share/lcg/SCRAMV1/V2_2_9_pre12-a20cf2a1acdf91532be7ceae5386205f/bin/scram build -f compile COMPILER=llvm-analyzer SCRAM_IGNORE_MISSING_COMPILERS=yes
scram build code-format
scram build code-checks
scramv1 b runtests
scram build -f compile COMPILER=llvm-analyzer
runTheMatrix.py -n | grep 2017 | grep SingleMuPt10
you will see that the Workflow number is 10007. Therefore, if you then type
runTheMatrix.py -l 10007 -ne
--> it gives you the cmsdriver commands for this particular workflow, that are actually used in Pull request test and in relvals.
--> you can notice typing the command above that the era actually used is --era Run2_2017 . Inserting the name of this era in the github search leads you to see the config listing the various detector eras to build a 2017 CMS era :
please look here :
https://github.com/cms-sw/cmssw/blob/master/Configuration/Eras/python/Era_Run2_2017_cff.py
where you can see that for the pixel it is phase1Pixel. runTheMatrix.py -l 10824.5,10824.52 -j 0
dqm-access -w -c -f '/PixelPhase1/Phase1_MechanicalView/PXBarrel/digi_occupancy_*_PXLayer_*' -e "match('/ZeroBias1/Run2017G-PromptReco-v.*/DQMIO',dataset)" -s https://cmsweb.cern.ch/dqm/offline/data/json
dqm-access --help | less
The last command can be generalized, e.g.:
If you want to fetch histograms from all periods, use Run2017.* instead of Run2017G (this is a plain regular expression, you can assemble the one you like)
If you are interested in the forward too, I'd suggest making a second query (if you repeat the same command with a different histogram target, the output file in your local directory will be overwritten...)
if you are interested only in some specific run, you could use: -e "run == XYZ and match...."
crab checkwrite --site T2_US_Nebraska
crab report, which creates a file in the results/ directory that should be added as a lumiMask in the next task:
config.Data.lumiMask = ''
python $DBS3_CLIENT_ROOT/examples/DataOpsScripts/DBS3SetDatasetStatus.py --dataset=/HSCPstau_M_246_Pythia8/tvami-HSCP_stau_Mass247_Pythia8_GENSIM-0446922c9e43366b98c5890088e2c39c/USER --url=https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter --status=INVALID --recursive=False
config.JobType.pyCfgParams = ['globalTag=80X_mcRun2_...']
config.JobType.pyCfgParams = ['ParamOne=1', 'ParamTwo=2', 'ParamThree=3']
config.Data.allowNonValidInputDataset = True
# CRAB3 configuration for submitting a private MC (GEN-SIM) production of
# the HSCP tau-prime (charge 2e) Pythia8 TuneCP2 sample, M=1200 GeV, Z' M=6000 GeV.
# NOTE(review): 'os' and 'glob' are not used anywhere below — presumably leftovers.
import os
import glob
from WMCore.Configuration import Configuration
config = Configuration()
config.section_('General')
# Copy both the job outputs and the job logs back to the storage site.
config.General.transferOutputs = True
config.General.transferLogs = True
config.General.requestName = 'HSCP_tauPrimeCharge2e_Pythia8_TuneCP2_Mass1200_v4_ZPrimeMass6000_GENSIM_2018_13TeV_v4'
config.section_('JobType')
config.JobType.allowUndistributedCMSSW = True
# 'PrivateMC' = generate events from scratch (no input dataset).
config.JobType.pluginName = 'PrivateMC'
config.JobType.psetName = 'HSCP_tauPrimeCharge2e_Pythia8_TuneCP2_Mass1200_v4_ZPrimeMass6000_cfg.py'
config.JobType.inputFiles = []
# Only the explicitly listed output file is collected (automatic collection disabled below).
config.JobType.outputFiles = ['GENSIM.root']
config.JobType.disableAutomaticOutputCollection = True
config.JobType.maxMemoryMB = 2500
config.section_('Data')
config.Data.outLFNDirBase = '/store/user/tvami/HSCP/'
config.Data.outputPrimaryDataset = 'HSCP_tauPrimeCharge2e_Pythia8_TuneCP2_Mass1200_v4_ZPrimeMass6000'
# Publish the output dataset under the same tag as the request name.
config.Data.outputDatasetTag = config.General.requestName
# Event-based splitting: 250 events per job, 200k events total -> 800 jobs.
config.Data.splitting = 'EventBased'
config.Data.unitsPerJob = 250
config.Data.totalUnits = 200000
config.Data.ignoreLocality = True
config.Data.publication = True
config.section_('Site')
config.Site.storageSite = 'T2_HU_Budapest'
# With ignoreLocality = True, jobs may run at any of these whitelisted sites.
config.Site.whitelist = ['T2_DE_DESY','T2_CH_CERN','T2_IT_Bari','T1_IT_*','T2_US_*', 'T3_US_FNALLPC','T2_HU_Budapest','T2_FR_*', 'T2_UK_London_IC']
# CRAB3 configuration for the AOD step of the same HSCP tau-prime sample:
# runs '4HLT2AOD.py' over the previously published HLT-step user dataset.
# NOTE(review): 'os' and 'glob' are not used anywhere below — presumably leftovers.
import os
import glob
from WMCore.Configuration import Configuration
config = Configuration()
config.section_('General')
config.General.transferOutputs = True
config.General.transferLogs = True
config.General.requestName = 'HSCP_tauPrimeCharge2e_Pythia8_TuneCP2_Mass1200_v4_ZPrimeMass6000_AOD_2018_13TeV_v1'
config.section_('JobType')
config.JobType.allowUndistributedCMSSW = True
# 'Analysis' = process an existing input dataset (cf. 'PrivateMC' for generation).
config.JobType.pluginName = 'Analysis'
config.JobType.psetName = '4HLT2AOD.py'
config.JobType.outputFiles = ['HSCP_AOD.root']
config.JobType.disableAutomaticOutputCollection = True
# Multi-threaded job: 8 cores with a 6 GB memory budget.
config.JobType.maxMemoryMB = 6000
config.JobType.numCores = 8
config.section_('Data')
# Input is a user-published dataset, hence the 'phys03' DBS instance.
config.Data.inputDBS = 'phys03'
config.Data.inputDataset = '/HSCP_tauPrimeCharge2e_Pythia8_TuneCP2_Mass1200_v4_ZPrimeMass6000/tvami-crab_HSCP_tauPrimeCharge2e_Pythia8_TuneCP2_Mass1200_v4_ZPrimeMass6000_HLT_2018_13TeV_v1-b403a189a2d057e62e59ed092120c7f4/USER'
config.Data.outLFNDirBase = '/store/user/tvami/HSCP/'
# One input file per job.
config.Data.splitting = 'FileBased'
config.Data.unitsPerJob = 1
#config.Data.totalUnits = 1
config.Data.ignoreLocality = True
config.Data.publication = True
config.section_('Site')
config.Site.storageSite = 'T2_HU_Budapest'
# With ignoreLocality = True, jobs may run at any of these whitelisted sites.
config.Site.whitelist = ['T2_DE_DESY','T2_CH_CERN','T2_IT_Bari','T1_IT_*','T2_US_*', 'T3_US_FNALLPC','T2_HU_Budapest','T2_FR_*', 'T2_UK_London_IC']
crab checkdataset --dataset /JetMET/Run2022F-PromptNanoAODv11_v1-v2/NANOAOD
dasgoclient --query='file run=315690 dataset=/SingleMuon/tsusa-SingleMuonResubmitted_2018A_final_part_2-8ef74934bac0b9bb265f656f3f38fda1/USER instance=prod/phys03' --limit=0 >> 315690.txt
voms-proxy-init -rfc -voms cms -valid 192:00
xrdcp root://cms-xrd-global.cern.ch//store/data/Run2023D/Cosmics/RAW-RECO/CosmicSP-PromptReco-v1/000/369/811/00000/2ad63d9f-234b-4c68-8c18-49d485d42bc7.root .
edmDumpEventContent 2ad63d9f-234b-4c68-8c18-49d485d42bc7.root > edmFileContent.txt
wget https://raw.githubusercontent.com/silviodonato/usercode/master/print_Stream_Dataset_Path_EventContent.py
hltGetConfiguration run:367902 > hlt.py ### or something like hltGetConfiguration /users/soohwan/HLT_132X/HLT_HI_2023_Run3ppRef_v1_copy/V3 > hlt.py
python3 print_Stream_Dataset_Path_EventContent.py
root root://cms-xrd-global.cern.ch//store/user/tvami/Monitoring/ZeroBias/crab_2017pp_EraE_ZeroBias_Remaining_HighPrio_v1/190114_201832/0000/LA_856.root
gfal-ls -l srm://grid143.kfki.hu:8446/srm/managerv2?SFN=/dpm/kfki.hu/home/cms/phedex//store/user/tsusa/SingleMuon_ALCARECO/2018D_final_part_9/SingleMuon/SingleMuonResubmitted_2018D_final_part_9/200203_223530/0001/SiPixelCalSingleMuon_1177.root
lcg-ls -l -b -D srmv2 --vo cms srm://grid143.kfki.hu:8446/srm/managerv2\?SFN=/dpm/kfki.hu/home/cms/phedex/store/user/tvami/
rsync -azv --update LA* tvami@lxplus.cern.ch:/eos/cms/store/group/dpg_tracker_pixel/comm_pixel/Monitoring/
sed -i 's/.root/.root\ ./g' *
sed -i 's/\/dpm\/kfki.hu\/home\/cms\/phedex\/store\/user\/tvami/xrdcp\ root:\/\/cms-xrd-global.cern.ch\/\/store\/user\/tvami/g' *
# cmsDriver customisation snippets for tracking-only reconstruction.
# Full tracker-only reconstruction:
process.reconstruction_step = cms.Path(process.reconstruction_trackingOnly)
# Pixel-only variant. NOTE(review): this reassigns reconstruction_step,
# overriding the line above — these are alternatives, use one or the other.
process.raw2digi_step = cms.Path(process.RawToDigi_pixelOnly)
process.reconstruction_step = cms.Path(process.reconstruction_pixelTrackingOnly)
# Build an EDM PoolSource whose file list is every file found under the given
# EOS directory; the "/eos/cms" prefix is stripped so the entries become LFNs.
# (Indentation of the for-loop body was lost in the original snippet.)
import glob
readFiles = cms.untracked.vstring()
process.source = cms.Source("PoolSource", fileNames = readFiles)
# -1 means: process every event in the input files.
process.maxEvents = cms.untracked.PSet(input = cms.untracked.int32(-1))
file_list = glob.glob("/eos/cms/store/group/dpg_tracker_pixel/comm_pixel/Run3_studies/2021/*")
the_files = [f.replace("/eos/cms","") for f in file_list]
readFiles.extend(the_files)
# Debug helpers:
# Dump statistics about the conditions fetched through the GlobalTag.
process.GlobalTag.DumpStat = cms.untracked.bool(True)
# Allow reading input files written by a different CMSSW version than the one running.
process.source.bypassVersionCheck = cms.untracked.bool(True)
edmConfigDump your_cfg.py > dumped_cfg.py
# Enable the Timing service to report CPU/wall-clock time of the job.
process.Timing = cms.Service("Timing",
# summaryOnly = False -> report per-event/per-module timing, not just the job summary.
summaryOnly = cms.untracked.bool(False),
useJobReport = cms.untracked.bool(True)
)
cmsDriver.py RAW2RECO_slpashes21_Run345881
--conditions 120X_dataRun3_Express_v2
--customise Configuration/DataProcessing/RecoTLR.customiseExpress
--data
--datatier RECO
--era Run3
--eventcontent RECO
--filein "file:/afs/cern.ch/user/d/dkonst/public/splashEvents2021/skimSplashEvents2021_run_345881_2_CMSSW_12_0_2_patch2.root"
--fileout "file:step2.root"
--nThreads 8
--no_exec
--number 10
--process reRECO
--python_filename RAW2RECO_slpashes21_Run345881.py
--scenario pp
--step RAW2DIGI,L1Reco,RECO
--customise_commands "process.source.eventsToProcess = cms.untracked.VEventRange('345881:790:18567','345881:782:18370','345881:796:18724','345881:801:18840','345881:1031:24241','345881:1037:24375')"
# Route MessageLogger output to cerr, suppressing everything below WARNING;
# debugModules '*' enables debug output from every module.
# (The closing parenthesis was missing in the original snippet.)
process.MessageLogger = cms.Service("MessageLogger",
    debugModules = cms.untracked.vstring('*'),
    cerr = cms.untracked.PSet(
        threshold = cms.untracked.string('WARNING')
    ),
    destinations = cms.untracked.vstring('cerr')
)
// Emit one multi-line LogInfo("PPS") message summarising the harvester's
// configuration: the DQM folder, the alignment sequence steps, and the
// various output/debug flags (std::boolalpha prints bools as true/false).
edm::LogInfo("PPS").log([&](auto& li) {
li << "[harvester] parameters:\n";
li << "* folder: " << folder_ << "\n";
li << "* sequence:\n";
// List the sequence steps numbered from 1.
for (unsigned int i = 0; i < sequence_.size(); i++) {
li << "  " << i + 1 << ": " << sequence_[i] << "\n";
}
li << "* overwrite_sh_x: " << std::boolalpha << overwriteShX_ << "\n";
li << "* text_results_path: " << textResultsPath << "\n";
li << "* write_sqlite_results: " << std::boolalpha << writeSQLiteResults_ << "\n";
li << "* x_ali_rel_final_slope_fixed: " << std::boolalpha << xAliRelFinalSlopeFixed_ << "\n";
li << "* y_ali_final_slope_fixed: " << std::boolalpha << yAliFinalSlopeFixed_ << "\n";
li << "* debug: " << std::boolalpha << debug_;
});
git checkout -b BranchName
git pull
git add *
git commit -m "Some text to describe changes"
git merge origin/master
git rebase -i CMSSW_11_2_X
Then in the merge commit editor you mark "pick" your first commit and "squash" your second commit
Then do
git push my-cmssw +HEAD:PixelTemplateMakingUpdate
git push -u origin HEAD
Or
git push -u origin BranchName
git cms-merge-topic 22458
where 22458 is the PR number.
cd CMSSW_11_1_X_2019-12-06-1100/src
cmsenv
git cms-init
git checkout ALCARECOSiPixelCalSingleMuon
git cms-addpkg Calibration/TkAlCaRecoProducers
git cms-addpkg Configuration/EventContent
git cms-addpkg Configuration/PyReleaseValidation
git cms-addpkg Configuration/StandardSequences
git rebase -i CMSSW_11_1_X_2019-12-06-1100
....
there will be conflicts to be merged by hand
git add
git rebase --continue
git push my-cmssw +HEAD:ALCARECOSiPixelCalSingleMuon
git cms-addpkg Calibration/TkAlCaRecoProducers
git cms-addpkg Configuration/EventContent
git cms-addpkg Configuration/StandardSequences
git cms-addpkg Configuration/PyReleaseValidation
git remote add mmusich https://github.com/mmusich/cmssw.git
git fetch mmusich
git checkout updatePixelAlCaRecos_v5
scram b -j
tar -cjvf HistosOn2021-03-29_2017Files.tar.bz2 *
tar -xjvf HistosOn2021-03-14.tar.bz2
valgrind --leak-check=full cmsRun ConfigFileName.py
brilcalc lumi -u /fb --begin _fillnumber_ --end _runnumber_
where 4725 is the first fill in 2016, 5659 is the first fill in 2017 and 6469 is the first in 2018.
ln -s /home/tvami/ home.link
git merge origin/master
ls -l | awk ' {if ($5 < 6000000) print "rm " $9} ' '
/Library/Developer/CommandLineTools/SDKs/MacOSX11.1.sdk/usr/include/ = /usr/include
du -hsx * | sort -rh | head -10
locate "*.root" | grep "/data/vami/backup/vami/projects/*" > rootFiles.txt
conddb_import -f frontier://FrontierProd/CMS_CONDITIONS -i 'SiPixelGenErrorDBObject_38T_v1_offline' -c sqlite:SiPixelGenErrorDBObject_38T_v2_offline.db -t 'SiPixelGenErrorDBObject_38T_v2_offline' -b '197749' -e '200960'
conddb list SiPixelDynamicInefficiency_13TeV_50ns_v2_mc
From local file
conddb --db /afs/cern.ch/cms/CAF/CMSALCA/ALCA_TRACKERCALIB/Pixels/PixelDB2015/6SiPixelDynamicInefficiency/siPixelDynamicInefficiency_13TeV_50ns_v2_mc.db list SiPixelDynamicInefficiency_13TeV_50ns_v2_mc
both of them give the hash.
$ conddb dump 1fc23c8994620aa9d47d27baa9f0af4e99e76909 > 1fc23c8994620aa9d47d27baa9f0af4e99e76909.xml
$ conddb dump 5a9e60ee4edc52188da96e384da8fb2bccee30b4 > 5a9e60ee4edc52188da96e384da8fb2bccee30b4.xml
diff 1fc23c8994620aa9d47d27baa9f0af4e99e76909.xml 5a9e60ee4edc52188da96e384da8fb2bccee30b4.xml
conddb dump hash
conddb --db sqlite.db dump hash for a local sqlite file
sqlite3 TrackerSurfaceDeformations_v9_offline.db
sqlite> UPDATE IOV SET SINCE=1 WHERE SINCE=271866;
sqlite> .q
conddb --db TrackerAlignment_BiasTest_v2_IOV1.db edit TrackerAlignment_BiasTest_v2_IOV1
#!/bin/tcsh
# For every Run-era directory under $storage, and every matching subfolder,
# import the three SiPixelQuality payloads (other / stuckTBM / prompt) from
# the PCL output promptCalibConditions.db into per-tag sqlite files.
set storage=/afs/cern.ch/work/t/tvami/public/BadComponentAtPCL/CMSSW_11_0_X_2019-06-09-2300/src
# Outer loop: era directories (names containing "Run", excluding *.sh files).
foreach era (`/bin/ls $storage | grep Run | grep -v sh`)
#echo $era
# Inner loop: subfolders whose name contains "3".
foreach subfolder (`/bin/ls $storage/$era | grep 3`)
echo $era $subfolder
echo $storage/$era/$subfolder/promptCalibConditions.db
conddb_import -c sqlite_file:SiPixelQualityFromDbRcd_other_Ultralegacy2018_v0_mc.db -f sqlite_file:$storage/$era/$subfolder/promptCalibConditions.db -i SiPixelQualityFromDbRcd_other -t SiPixelQualityFromDbRcd_other_Ultralegacy2018_v0_mc
conddb_import -c sqlite_file:SiPixelQualityFromDbRcd_stuckTBM_Ultralegacy2018_v0_mc.db -f sqlite_file:$storage/$era/$subfolder/promptCalibConditions.db -i SiPixelQualityFromDbRcd_stuckTBM -t SiPixelQualityFromDbRcd_stuckTBM_Ultralegacy2018_v0_mc
conddb_import -c sqlite_file:SiPixelQualityFromDbRcd_prompt_Ultralegacy2018_v0_mc.db -f sqlite_file:$storage/$era/$subfolder/promptCalibConditions.db -i SiPixelQualityFromDbRcd_prompt -t SiPixelQualityFromDbRcd_prompt_Ultralegacy2018_v0_mc
end
end
ROOT.gROOT.SetBatch(True)
int tmp(){
TFile *_file0 = TFile::Open("Plotter_out_2020_05_21-2.root");
gDirectory->cd("EleEta");
TCanvas *c = (TCanvas * )gDirectory->Get("Signals_Background_2018_SR_Lep_V_b");
TList *l = c->GetListOfPrimitives();
for(const auto&& obj: *(c->GetListOfPrimitives())){
string search_str = "TH1D";
if (!strncmp(obj->ClassName(), search_str.c_str(), search_str.length())){
std::string name = obj->GetName();
TH1D *h = (TH1D *) c->GetPrimitive(name.c_str());
std::cout << "# of entries: " << h->GetEntries() << std::endl;
}
}
return 0;
}
# PyROOT snippet: fetch an object from an open TFile `f`; if it is a canvas,
# print the number of entries of every TH1D primitive drawn on it.
# (Indentation of the if/for bodies was lost in the original snippet.)
obj = f.Get(dirname+"/"+keyname)
if obj.InheritsFrom("TCanvas"):
    can = obj
    lista = can.GetListOfPrimitives()
    for entry in lista:
        if entry.ClassName() == "TH1D":
            name = entry.GetName()
            histo = can.GetPrimitive(name)
            NEntries = histo.GetEntries()
            print(dirname+"/"+keyname+' has '+str(NEntries)+' entries')
            # Bin contents are indexed from 1 (bin 0 is the underflow bin).
            histo.GetBinContent(1)
This is counted from 1, not 0.