Compare revisions

Commits on Source (1231)
Showing 1414 additions and 18 deletions
#!/bin/bash
# This is a wrapper to run the build.sh on CI
# This is a wrapper to run the build.sh or build-xyz.sh on CI
source setup.sh
./build.sh >& mylog.sh &
buildpid=$!
# The build mode is the suffix in build-xyz.sh
export BUILD_CI_MODE=${1}
while ps -p $buildpid 2>/dev/null ; do
sleep 60
done
echo "CEPCSW_LCG_RELEASE: ${CEPCSW_LCG_RELEASE}"
echo "CEPCSW_LCG_PLATFORM: ${CEPCSW_LCG_PLATFORM}"
echo "CEPCSW_LCG_VERSION: ${CEPCSW_LCG_VERSION}"
echo "CEPCSW_BLDTOOL: ${CEPCSW_BLDTOOL}"
tail -n100 mylog.sh
function build-with-log() {
buildpid=
logfile=mylog.txt
if [ "$CEPCSW_LCG_RELEASE" = "KEY4HEP_STACK" ]; then
logfile=mylog-k4.sh
./build-k4.sh >& ${logfile} &
buildpid=$!
else
source setup.sh
./build.sh >& ${logfile} &
buildpid=$!
fi
while ps -p $buildpid 2>/dev/null ; do
sleep 60
done &
echoer=$!
trap 'kill $echoer' 0
wait $buildpid
statuspid=$?
tail -n100 ${logfile}
exit $statuspid
}
function build-with-stdout() {
local build_flags=${BUILD_CI_MODE}
local source_flag=true
# Key4hep stack mode
if [ "$CEPCSW_LCG_RELEASE" = "KEY4HEP_STACK" ]; then
build_flags=k4
source_flag=false
fi
# prepend '-' if necessary
if [ -n "$build_flags" ]; then
build_flags=-${build_flags}
fi
if $source_flag; then
source setup.sh
fi
./build${build_flags}.sh
}
if [ -n "${GITHUB_ACTION}" ]; then
build-with-log
else
build-with-stdout
fi
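
For reference, a minimal usage sketch of this CI wrapper, assuming a build-tdr.sh script exists in the source tree ("tdr" is only an illustrative mode name; any build-<mode>.sh suffix works the same way). With no argument the plain build.sh is used; on GitHub Actions (GITHUB_ACTION set) the wrapper logs to a file and tails the last 100 lines instead of streaming to stdout.

bash ./.build.ci.sh          # BUILD_CI_MODE empty -> runs ./build.sh
bash ./.build.ci.sh tdr      # BUILD_CI_MODE=tdr   -> runs ./build-tdr.sh
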
BootStrap:docker
From:centos:7
%post
yum install -y libicu which make redhat-lsb epel-release libglvnd-devel git mesa-libGLU-devel libXmu-devel motif-devel compat-db47
#!/bin/bash
############################################
# Description:
# Manage the github runners in singularity
# Usage:
# $ ./setup-github-runner new <TOKEN>
# $ ./setup-github-runner start
# Author: Tao Lin <lintao AT ihep.ac.cn>
############################################
#############################################
# Configuration
#############################################
export RUNNER_TOP_DIR=/tmp/$USER/github-runner
export SINGULARITY_BINDPATH=/cvmfs
export RUNNER_REPO=https://github.com/cepc/CEPCSW
[ -d "$RUNNER_TOP_DIR" ] || mkdir $RUNNER_TOP_DIR
#############################################
# Create a new github action runner (gar)
#############################################
function gar-new-id() {
local currentid="$(find $RUNNER_TOP_DIR -maxdepth 1 -name github-runner-\* -type d | rev | cut -d- -f 1 | rev | sort -n | tail -n1)"
if [ -z "$currentid" ]; then
echo 1
else
echo $((currentid+1))
fi
}
function gar-new-name() {
echo github-runner-$(gar-new-id)
}
function gar-download-url() {
echo https://github.com/actions/runner/releases/download/v2.274.2/actions-runner-linux-x64-2.274.2.tar.gz
}
function gar-download-filename() {
echo actions-runner-linux-x64-2.274.2.tar.gz
}
function gar-new() {
local dn=$(gar-new-name)
local fdn=$RUNNER_TOP_DIR/$dn
if [ -d "$fdn" ]; then
echo "ERROR: $dn already exists" 1>&2
exit -1
fi
mkdir $fdn || {
echo "ERROR: Failed to create $fdn" 1>&2
exit -1
}
pushd $RUNNER_TOP_DIR
if [ ! -f "$(gar-download-filename)" ]; then
curl -O -L $(gar-download-url) || exit -1
fi
popd
pushd $fdn
tar xzf $RUNNER_TOP_DIR/$(gar-download-filename) || exit -1
# start singularity instance
singularity instance start ~/github-runner.sif ${dn}
singularity run instance://${dn} ./config.sh --url ${RUNNER_REPO} --token ${token} || exit -1
singularity run instance://${dn} bash -c "./run.sh &"
popd
}
function new() {
token=$1; shift
if [ -z "$token" ]; then
echo "Please pass the token to this script" 1>&2
exit -1
fi
gar-new
}
#############################################
# Start github action runners (gar)
#############################################
function gar-lists() {
find $RUNNER_TOP_DIR -maxdepth 1 -name github-runner-\* -type d -exec basename {} \;
}
function gar-check() {
local gar=$1;
local result=$(singularity instance list $gar | grep $gar)
if [ -n "$result" ]; then
echo Y
else
echo N
fi
}
function gar-start() {
local gar=$1;
local isrunning=$(gar-check $gar)
if [ "$isrunning" = "Y" ]; then
echo "WARNING: $gar is already running. skip it."
return
fi
pushd $RUNNER_TOP_DIR/$gar
singularity instance start ~/github-runner.sif ${gar}
singularity run instance://${gar} bash -c "./run.sh &"
popd
}
function start() {
local gars="$*"
if [ -z "$gars" ]; then
echo "All the github action runners will be started"
gars="$(gar-lists)"
fi
local gar
for gar in $gars; do
gar-start $gar
done
}
#############################################
# Command line options
#############################################
cmd=$1; shift
if [ -z "$cmd" ]; then
echo "Please specify the command to be invoked" 1>&2
exit -1
fi
case $cmd in
new)
new $*
;;
start)
start $*
;;
*)
echo "Unknown command '$cmd'" 1>&2
;;
esac
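
As an illustration of the runner bookkeeping above (the directory names are hypothetical): if /tmp/$USER/github-runner already contains github-runner-1 and github-runner-3, the find | rev | cut -d- -f1 | rev | sort -n | tail -n1 pipeline in gar-new-id yields 3, so the next instance is named github-runner-4. A sketch of the two commands from the usage header:

./setup-github-runner new <TOKEN>   # downloads the runner, creates e.g. github-runner-4, registers it against $RUNNER_REPO
./setup-github-runner start         # starts every github-runner-* instance that is not already running
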
#!/bin/bash
############################################
# Description:
# Manage the gitlab runners
# Usage:
# $ ./setup-gitlab-runner new <TOKEN>
# $ ./setup-gitlab-runner start
#
# Register in crontab:
#
# */10 * * * * $HOME/setup-gitlab-runner.sh start >& /tmp/$USER/gitlab-runner/start.log
#
# Author: Tao Lin <lintao AT ihep.ac.cn>
############################################
#############################################
# Configuration
#############################################
export RUNNER_TOP_DIR=/tmp/$USER/gitlab-runner
export SINGULARITY_BINDPATH=/cvmfs
export RUNNER_URL=https://code.ihep.ac.cn
[ -d "$RUNNER_TOP_DIR" ] || mkdir $RUNNER_TOP_DIR
#############################################
# Create a new gitlab runner (glr)
#############################################
# ./gitlab-runner register --url https://code.ihep.ac.cn --token XXXXXX
function glr-preq() {
# if $HOME/gitlab-runner exists
if [ -f "$HOME/gitlab-runner" ]; then
cp $HOME/gitlab-runner .
else
curl -L --output gitlab-runner https://gitlab-runner-downloads.s3.amazonaws.com/latest/binaries/gitlab-runner-linux-amd64
fi
chmod +x gitlab-runner
}
function glr-new() {
local runner_url=$1; shift
local token=$1; shift
local executor=${1:-shell}; shift
local shell=${1:-bash}; shift
pushd $RUNNER_TOP_DIR
# check if gitlab-runner exists
if [ ! -f gitlab-runner ]; then
glr-preq
fi
./gitlab-runner register --url $runner_url --token $token --executor $executor --shell $shell
popd
}
function new() {
local token=$1; shift
if [ -z "$token" ]; then
echo "Please pass the token to this script" 1>&2
exit -1
fi
glr-new $RUNNER_URL $token
}
#############################################
# Start the gitlab runner (glr)
#############################################
function glr-start() {
local glr=gitlab-runner
pushd $RUNNER_TOP_DIR
apptainer instance start ~/github-runner.sif ${glr}
apptainer run instance://${glr} bash -c "./gitlab-runner run -c ./config.toml &"
popd
}
function start() {
glr-start
}
#############################################
# Command line options
#############################################
cmd=$1; shift
if [ -z "$cmd" ]; then
echo "Please specify the command to be invoked" 1>&2
exit -1
fi
case $cmd in
new)
new $*
;;
start)
start $*
;;
*)
echo "Unknown command '$cmd'" 1>&2
;;
esac
# This is a basic workflow to help you get started with Actions
name: CI
# Controls when the action will run.
on:
# Triggers the workflow on push or pull request events but only for the master branch
push:
branches: [ master ]
pull_request:
branches: [ master ]
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
# This workflow contains a single job called "build"
build:
# The type of runner that the job will run on
runs-on: self-hosted
strategy:
matrix:
LCG_RELEASE:
- LCG_EXTERNAL
- KEY4HEP_STACK
CEPCSW_BLDTOOL:
- ninja
# - make
# Steps represent a sequence of tasks that will be executed as part of the job
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v2
# Runs a single command using the runner's shell
- name: Run a one-line script
run: echo Hello, world!
# Runs a set of commands using the runner's shell
- name: Run a multi-line script
run: |
echo Add other actions to build,
echo test, and deploy your project.
- name: Run the build script
run: |
pwd
bash ./.build.ci.sh
env:
LCG_RELEASE: ${{matrix.LCG_RELEASE}}
CEPCSW_BLDTOOL: ${{matrix.CEPCSW_BLDTOOL}}
build.*
build
spack*
./Generator/output/
./Generator/options/
InstallArea/
venv
build:
##############################################################################
# CI for CEPCSW at IHEP GitLab
##############################################################################
workflow:
rules:
# These 3 rules from https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Workflows/MergeRequest-Pipelines.gitlab-ci.yml
# Run on merge requests
- if: $CI_MERGE_REQUEST_IID
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
# Run on tags
- if: $CI_COMMIT_TAG
# Run when called from an upstream pipeline https://docs.gitlab.com/ee/ci/pipelines/downstream_pipelines.html?tab=Multi-project+pipeline#use-rules-to-control-downstream-pipeline-jobs
- if: $CI_PIPELINE_SOURCE == 'pipeline'
- if: $CI_PIPELINE_SOURCE == 'parent-child'
# Run on commits to the default branch
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
# The last rule above blocks manual and scheduled pipelines on non-default branch. The rule below allows them:
- if: $CI_PIPELINE_SOURCE == "schedule"
# Run if triggered from Web using 'Run Pipelines'
- if: $CI_PIPELINE_SOURCE == "web"
# Run if triggered from WebIDE
- if: $CI_PIPELINE_SOURCE == "webide"
stages:
- build
##############################################################################
# Template for Build and Test
##############################################################################
# Because cmake/ctest hardcode paths inside the build directory,
# the test job would fail if it were executed on a different node.
# Therefore, the build script and the test script are run together in one job.
.envvar_template:
variables:
CEPCSW_LCG_RELEASE: LCG
CEPCSW_LCG_PLATFORM: x86_64-el9-gcc11-opt
CEPCSW_LCG_VERSION: 105.0.0
# for k8s
.build_template_k8s:
extends: .envvar_template
image: cepc/cepcsw-cvmfs:el9
stage: build
before_script:
- source /cvmfs/cepcsw.ihep.ac.cn/prototype/setup.sh
tags:
- k8s # using k8s as runner
script:
- mkdir build
- cd build
- cmake ..
- make
- sed -i 's%^CVMFS_HTTP_PROXY=.*%CVMFS_HTTP_PROXY=http://squid-01.ihep.ac.cn:3128%' /etc/cvmfs/default.local
- for repo in sft.cern.ch geant4.cern.ch cepcsw.ihep.ac.cn; do [ -d "/cvmfs/$repo" ] || mkdir /cvmfs/$repo; sudo mount -t cvmfs $repo /cvmfs/$repo; done
- bash ./.build.ci.sh
- bash ./.test.ci.sh
##############################################################################
# Build & Test in k8s (LCG)
##############################################################################
build:lcg:el9:k8s:
extends: .build_template_k8s
rules:
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
when: manual
artifacts:
paths:
- build/
reports:
junit: build.${CEPCSW_LCG_VERSION}.${CEPCSW_LCG_PLATFORM}/cepcsw-ctest-result.xml
##############################################################################
# Build the docs
##############################################################################
build:docs:k8s:
extends: .build_template_k8s
image: sphinxdoc/sphinx
script:
- bash build-docs.sh
artifacts:
paths:
- docs/build/html/
rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
changes:
- docs/**/*
# This is used to merge the contributions of accounts with different emails.
#
# git shortlog -se
#
# Sorted by surname
Cao Guangjie <gjcao@lxslc605.ihep.ac.cn>
Cao Guangjie <gjcao@lxslc605.ihep.ac.cn> Cao Guangjie <gjcao@lxslc611.ihep.ac.cn>
Cao Guangjie <gjcao@lxslc605.ihep.ac.cn> Cao Guangjie <gjcao@lxslc614.ihep.ac.cn>
Wenxing Fang <fangwx@ihep.ac.cn>
Wenxing Fang <fangwx@ihep.ac.cn> wenxingfang <fangwx@ihep.ac.cn>
Wenxing Fang <fangwx@ihep.ac.cn> Fang Wenxing <wxfang@lxslc608.ihep.ac.cn>
Wenxing Fang <fangwx@ihep.ac.cn> Fang Wenxing <wxfang@lxslc609.ihep.ac.cn>
Wenxing Fang <fangwx@ihep.ac.cn> Fang Wenxing <wxfang@lxslc613.ihep.ac.cn>
Wenxing Fang <fangwx@ihep.ac.cn> Fang Wenxing <wxfang@lxslc614.ihep.ac.cn>
Wenxing Fang <fangwx@ihep.ac.cn> Fang Wenxing <wxfang@lxslc703.ihep.ac.cn>
Wenxing Fang <fangwx@ihep.ac.cn> Fang Wenxing <wxfang@lxslc705.ihep.ac.cn>
Wenxing Fang <fangwx@ihep.ac.cn> Fang Wenxing <wxfang@lxslc706.ihep.ac.cn>
Wenxing Fang <fangwx@ihep.ac.cn> Fang Wenxing <wxfang@lxslc708.ihep.ac.cn>
Wenxing Fang <fangwx@ihep.ac.cn> Fang Wenxing <wxfang@lxslc709.ihep.ac.cn>
Wenxing Fang <fangwx@ihep.ac.cn> Fang Wenxing <wxfang@lxslc711.ihep.ac.cn>
Wenxing Fang <fangwx@ihep.ac.cn> Fang Wenxing <wxfang@lxslc712.ihep.ac.cn>
Wenxing Fang <fangwx@ihep.ac.cn> Fang Wenxing <wxfang@lxslc713.ihep.ac.cn>
Wenxing Fang <fangwx@ihep.ac.cn> Fang Wenxing <wxfang@lxslc714.ihep.ac.cn>
Wenxing Fang <fangwx@ihep.ac.cn> Fang Wenxing <wxfang@lxslc716.ihep.ac.cn>
Wenxing Fang <fangwx@ihep.ac.cn> wenxingfang <1473717798@qq.com>
Chengdong Fu <fucd@ihep.ac.cn>
Chengdong Fu <fucd@ihep.ac.cn> fucd <fucd@ihep.ac.cn>
Tao Lin <lintao@ihep.ac.cn>
Tao Lin <lintao@ihep.ac.cn> Tao Lin <lintao51@gmail.com>
Tao Lin <lintao@ihep.ac.cn> lintao <lintao51@gmail.com>
Tao Lin <lintao@ihep.ac.cn> Tao Lin <831611+mirguest@users.noreply.github.com>
Mengyao Liu <myliu@ihep.ac.cn>
Mengyao Liu <myliu@ihep.ac.cn> myliu <201916234@mail.sdu.edu.cn>
Linghui Wu <wulh@ihep.ac.cn>
Linghui Wu <wulh@ihep.ac.cn> wu linghui <wulh@lxslc716.ihep.ac.cn>
Zhang Yao <zhangyao@ihep.ac.cn>
Zhang Yao <zhangyao@ihep.ac.cn> ihepzhangyao <ihepyzhang@gmail.com>
Dan Yu <danerdaner412@gmail.com>
Dan Yu <danerdaner412@gmail.com> danerdaner412@gmail.com <yudan@lxslc703.ihep.ac.cn>
Thomas Madlener <thomas.madlener@desy.de>
Thomas Madlener <thomas.madlener@desy.de> tmadlener <thomas.madlener@desy.de>
Hao Zeng <hao.zeng@cern.ch>
Hao Zeng <hao.zeng@cern.ch> hazeng <hao.zeng@cern.ch>
Hao Zeng <hao.zeng@cern.ch> zenghao <1251935595@qq.com>
Mingrui Zhao <mingrui.zhao@mail.labz0.org>
Mingrui Zhao <mingrui.zhao@mail.labz0.org> Mingrui <mingrui.zhao@mail.labz0.org>
Zou Jiaheng <zoujh@ihep.ac.cn>
Zou Jiaheng <zoujh@ihep.ac.cn> zoujh <zoujh@ihep.ac.cn>
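
With the .mailmap above in place, git collapses the alternate identities into the canonical ones, so contribution summaries no longer split one author across several emails. A quick check (commands only; output omitted):

git shortlog -se                                # aliases such as "fucd <fucd@ihep.ac.cn>" are now counted under "Chengdong Fu <fucd@ihep.ac.cn>"
git log --use-mailmap --author="Chengdong Fu"   # matches commits recorded under either identity
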
version: "2"
build:
os: "ubuntu-22.04"
tools:
python: "3.10"
python:
install:
- requirements: docs/requirements.txt
sphinx:
configuration: docs/source/conf.py
#!/bin/bash
# Description:
# Run the tests using ctest
#
# Author:
# Tao Lin <lintao AT ihep.ac.cn>
##############################################################################
# Utilities
##############################################################################
function build-dir() {
local blddir=build
if [ -n "${CEPCSW_BLDTOOL}" ]; then
blddir=${blddir}.${CEPCSW_BLDTOOL}
fi
# If the extra env vars are set, append them to the build dir name
if [ -n "${CEPCSW_LCG_VERSION}" ]; then
blddir=${blddir}.${CEPCSW_LCG_VERSION}
fi
if [ -n "${CEPCSW_LCG_PLATFORM}" ]; then
blddir=${blddir}.${CEPCSW_LCG_PLATFORM}
fi
echo $blddir
}
function junit-output() {
local default=cepcsw-ctest-result.xml
echo ${CEPCSW_JUNIT_OUTPUT:-$default}
}
##############################################################################
# Main
##############################################################################
echo "CEPCSW_LCG_RELEASE: ${CEPCSW_LCG_RELEASE}"
echo "CEPCSW_LCG_PLATFORM: ${CEPCSW_LCG_PLATFORM}"
echo "CEPCSW_LCG_VERSION: ${CEPCSW_LCG_VERSION}"
echo "CEPCSW_BLDTOOL: ${CEPCSW_BLDTOOL}"
source setup.sh
# re-run cmake in case the directory path has changed (e.g. a different node)
pushd $(build-dir)
cmake ..
popd
ctest --output-junit $(junit-output) --test-dir $(build-dir)
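
For illustration, how build-dir and junit-output resolve with the values used by the GitLab CI template above (CEPCSW_LCG_VERSION=105.0.0, CEPCSW_LCG_PLATFORM=x86_64-el9-gcc11-opt), assuming the matching build directory was already produced by the build step; CEPCSW_BLDTOOL and CEPCSW_JUNIT_OUTPUT are optional, and ninja-ctest.xml is just an example output name:

export CEPCSW_LCG_VERSION=105.0.0
export CEPCSW_LCG_PLATFORM=x86_64-el9-gcc11-opt
bash ./.test.ci.sh     # ctest runs in build.105.0.0.x86_64-el9-gcc11-opt and writes cepcsw-ctest-result.xml
CEPCSW_BLDTOOL=ninja CEPCSW_JUNIT_OUTPUT=ninja-ctest.xml bash ./.test.ci.sh
                       # ctest runs in build.ninja.105.0.0.x86_64-el9-gcc11-opt and writes ninja-ctest.xml
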
@@ -10,6 +10,7 @@ language: cpp
env:
matrix:
- COMPILER=gcc; LCG_RELEASE=LCG_96c_LS; STANDARD=17; COMPILER_VERSION=gcc8;
- COMPILER=gcc; LCG_RELEASE=KEY4HEP_STACK; STANDARD=17; COMPILER_VERSION=gcc8;
before_install:
- wget --no-check-certificate https://ecsft.cern.ch/dist/cvmfs/cvmfs-release/cvmfs-release-latest_all.deb
@@ -25,7 +26,7 @@ before_install:
- echo "CVMFS_HTTP_PROXY=DIRECT" | sudo tee -a /etc/cvmfs/default.local > /dev/null
- echo "CVMFS_CACHE_BASE='/var/lib/cvmfs'" | sudo tee -a /etc/cvmfs/default.local > /dev/null
- echo "CVMFS_FORCE_SIGNING='yes'" | sudo tee -a /etc/cvmfs/default.local > /dev/null
- echo "CVMFS_REPOSITORIES='sft.cern.ch,sw-nightlies.hsf.org,cepcsw.ihep.ac.cn,container.ihep.ac.cn'" | sudo tee -a /etc/cvmfs/default.local > /dev/null
- echo "CVMFS_REPOSITORIES='sft.cern.ch,sft-nightlies.cern.ch,sw.hsf.org,sw-nightlies.hsf.org,cepcsw.ihep.ac.cn,container.ihep.ac.cn'" | sudo tee -a /etc/cvmfs/default.local > /dev/null
- echo "CVMFS_SEND_INFO_HEADER=no" | sudo tee -a /etc/cvmfs/default.local > /dev/null
- cat /etc/cvmfs/default.local
- # change wrt dd4hep setup: don't manually mount cvmfs folders
@@ -35,15 +36,20 @@ before_install:
- sudo mkdir -p /cvmfs/sft.cern.ch
- sudo mkdir -p /cvmfs/sft-nightlies.cern.ch
- sudo mkdir -p /cvmfs/geant4.cern.ch
- sudo mkdir -p /cvmfs/sw.hsf.org
- sudo mkdir -p /cvmfs/sw-nightlies.hsf.org
- sudo mkdir -p /cvmfs/cepcsw.ihep.ac.cn
- sudo mkdir -p /cvmfs/container.ihep.ac.cn
- ls /cvmfs/sft.cern.ch
- ls /cvmfs/sft-nightlies.cern.ch
- ls /cvmfs/geant4.cern.ch
- ls /cvmfs/sw.hsf.org
- ls /cvmfs/sw-nightlies.hsf.org
- ls /cvmfs/cepcsw.ihep.ac.cn
- ls /cvmfs/container.ihep.ac.cn
- export CVMFS_REPOS="-v /cvmfs/sft.cern.ch:/cvmfs/sft.cern.ch"
- export CVMFS_REPOS="${CVMFS_REPOS} -v /cvmfs/sft-nightlies.cern.ch:/cvmfs/sft-nightlies.cern.ch"
- export CVMFS_REPOS="${CVMFS_REPOS} -v /cvmfs/sw.hsf.org:/cvmfs/sw.hsf.org"
- export CVMFS_REPOS="${CVMFS_REPOS} -v /cvmfs/sw-nightlies.hsf.org:/cvmfs/sw-nightlies.hsf.org"
- export CVMFS_REPOS="${CVMFS_REPOS} -v /cvmfs/geant4.cern.ch:/cvmfs/geant4.cern.ch"
- export CVMFS_REPOS="${CVMFS_REPOS} -v /cvmfs/cepcsw.ihep.ac.cn:/cvmfs/cepcsw.ihep.ac.cn"
......
import os, sys
from Gaudi.Configuration import *
########### k4DataSvc ####################
from Configurables import k4DataSvc
podioevent = k4DataSvc("EventDataSvc", input="track.root")
##########################################
########## CEPCSWData #################
cepcswdatatop ="/cvmfs/cepcsw.ihep.ac.cn/prototype/releases/data/latest"
#######################################
########## Podio Input ###################
from Configurables import PodioInput
inp = PodioInput("InputReader")
inp.collections = [ "CompleteTracks",
"CompleteTracksParticleAssociation",
"RecTofCollection",
"DndxTracks" ]
##########################################
from Configurables import AnalysisPIDAlg
anaPID = AnalysisPIDAlg("AnalysisPIDAlg")
anaPID.OutputFile = "./pid.root"
##############################################################################
# POD I/O
##############################################################################
########################################
from Configurables import ApplicationMgr
ApplicationMgr(
TopAlg=[inp, anaPID ],
EvtSel="NONE",
EvtMax=-1,
ExtSvc=[podioevent],
#OutputLevel=DEBUG
)
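
A minimal sketch of running these options with the standard Gaudi job runner, assuming the snippet above is saved as ana_pid.py (the file name is hypothetical) and that track.root from the reconstruction step is in the working directory:

source setup.sh          # CEPCSW environment, as in the CI scripts above
gaudirun.py ana_pid.py   # reads the collections from track.root and writes pid.root
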
# gaudi_add_header_only_library(AnalysisPIDLib)
# Modules
gaudi_add_module(AnalysisPID
SOURCES src/AnalysisPIDAlg.cpp
LINK Gaudi::GaudiAlgLib
Gaudi::GaudiKernel
DataHelperLib
DetSegmentation
DetInterface
${GSL_LIBRARIES}
${GEAR_LIBRARIES}
${LCIO_LIBRARIES}
EDM4CEPC::edm4cepc EDM4CEPC::edm4cepcDict
EDM4HEP::edm4hep EDM4HEP::edm4hepDict
k4FWCore::k4FWCore
)
target_include_directories(AnalysisPID PUBLIC
$<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}>/include
$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>)
install(TARGETS AnalysisPID
EXPORT CEPCSWTargets
RUNTIME DESTINATION "${CMAKE_INSTALL_BINDIR}" COMPONENT bin
LIBRARY DESTINATION "${CMAKE_INSTALL_LIBDIR}" COMPONENT shlib
COMPONENT dev)
#include "AnalysisPIDAlg.h"
#include "GaudiKernel/DataObject.h"
#include "GaudiKernel/IHistogramSvc.h"
#include "GaudiKernel/MsgStream.h"
#include "GaudiKernel/SmartDataPtr.h"
#include "DetInterface/IGeomSvc.h"
#include "DataHelper/HelixClass.h"
#include "DD4hep/Detector.h"
#include "DD4hep/DD4hepUnits.h"
#include "CLHEP/Units/SystemOfUnits.h"
#include <cmath>
#include "UTIL/ILDConf.h"
#include "DetIdentifier/CEPCConf.h"
#include "UTIL/CellIDEncoder.h"
using namespace edm4hep;
DECLARE_COMPONENT( AnalysisPIDAlg )
//------------------------------------------------------------------------------
AnalysisPIDAlg::AnalysisPIDAlg( const std::string& name, ISvcLocator* pSvcLocator )
: Algorithm( name, pSvcLocator ) {
declareProperty("CompleteTracks", _FultrkCol, "handler of the input complete track collection");
declareProperty("CompleteTracksParticleAssociation", _FultrkParAssCol, "handler of the input track particle association collection");
declareProperty("DndxTracks", _inDndxColHdl, "handler of the collection of dN/dx tracks");
declareProperty("RecTofCollection", _inTofColHdl, "handler of the collection of tof tracks");
// output
declareProperty("OutputFile", m_outputFile = "pid.root", "output file name");
}
//------------------------------------------------------------------------------
StatusCode AnalysisPIDAlg::initialize(){
info() << "Booking Ntuple" << endmsg;
m_file = new TFile(m_outputFile.value().c_str(),"RECREATE");
m_tree = new TTree("pid","pid");
m_tree->Branch("Nevt",&_nEvt,"Nevt/I");
m_tree->Branch("Ndndxtrk",&Ndndxtrk,"Ndndxtrk/I");
m_tree->Branch("Ntoftrk",&Ntoftrk,"Ntoftrk/I");
m_tree->Branch("Nfullass",&Nfullass,"Nfullass/I");
m_tree->Branch("Nfulltrk",&Nfulltrk,"Nfulltrk/I");
m_tree->Branch("tpcidx",&tpcidx);
m_tree->Branch("tofidx",&tofidx);
m_tree->Branch("matchedtpc",&matchedtpc);
m_tree->Branch("matchedtof",&matchedtof);
m_tree->Branch("truthidx",&truthidx);
m_tree->Branch("tof_chi2s",&tof_chi2s);
m_tree->Branch("tof_chis",&tof_chis);
m_tree->Branch("tof_expt",&tof_expt);
m_tree->Branch("tof_meast",&tof_meast);
m_tree->Branch("tof_measterr",&tof_measterr);
m_tree->Branch("tpc_chi2s",&tpc_chi2s);
m_tree->Branch("tpc_chis",&tpc_chis);
m_tree->Branch("tpc_expdndxs",&tpc_expdndxs);
m_tree->Branch("tpc_measdndx",&tpc_measdndx);
m_tree->Branch("tpc_measdndxerr",&tpc_measdndxerr);
m_tree->Branch("tot_chi2s",&tot_chi2s);
m_tree->Branch("recoPDG",&recoPDG);
m_tree->Branch("tpcrecoPDG",&tpcrecoPDG);
m_tree->Branch("tofrecoPDG",&tofrecoPDG);
//gen trk parameters
m_tree->Branch("genpx",&genpx);
m_tree->Branch("genpy",&genpy);
m_tree->Branch("genpz",&genpz);
m_tree->Branch("genE",&genE);
m_tree->Branch("genp",&genp);
m_tree->Branch("genM",&genM);
m_tree->Branch("genphi",&genphi);
m_tree->Branch("gentheta",&gentheta);
m_tree->Branch("endx",&endx);
m_tree->Branch("endy",&endy);
m_tree->Branch("endz",&endz);
m_tree->Branch("endr",&endr);
m_tree->Branch("PDG",&PDG);
m_tree->Branch("genstatus",&genstatus);
m_tree->Branch("simstatus",&simstatus);
m_tree->Branch("isdecayintrker",&isdecayintrker);
m_tree->Branch("iscreatedinsim",&iscreatedinsim);
m_tree->Branch("isbackscatter",&isbackscatter);
m_tree->Branch("isstopped",&isstopped);
_nEvt = 0;
return StatusCode::SUCCESS;
}
//------------------------------------------------------------------------------
StatusCode AnalysisPIDAlg::execute(){
const edm4hep::TrackCollection* trkCol = nullptr;
const edm4hep::RecDqdxCollection* dndxCols = nullptr;
const edm4hep::RecTofCollection* tofCols = nullptr;
const edm4hep::MCRecoTrackParticleAssociationCollection* fultrkparassCols = nullptr;
ClearVars();
try {
trkCol = _FultrkCol.get();
}
catch ( GaudiException &e ) {
debug() << "Complete track collection " << _FultrkCol.fullKey() << " is unavailable in event " << _nEvt << endmsg;
// Without the track collection nothing below can be matched: record the event and return.
Nfulltrk = -1;
m_tree->Fill();
_nEvt++;
return StatusCode::SUCCESS;
}
if ( trkCol->size() == 0 ) {
debug() << "No full track found in event " << _nEvt << endmsg;
Nfulltrk = 0;
}
else{
Nfulltrk = trkCol->size();
}
try {
fultrkparassCols = _FultrkParAssCol.get();
}
catch ( GaudiException &e ) {
debug() << "Complete track particle association collection " << _FultrkParAssCol.fullKey() << " is unavailable in event " << _nEvt << endmsg;
Nfullass = -1;
m_tree->Fill();
_nEvt++;
return StatusCode::SUCCESS;
}
try {
dndxCols = _inDndxColHdl.get();
}
catch ( GaudiException &e ) {
debug() << "DndxTrack collection " << _inDndxColHdl.fullKey() << " is unavailable in event " << _nEvt << endmsg;
Ndndxtrk = -1;
}
// Guard against a missing collection (dndxCols stays nullptr if the get() above threw).
if ( dndxCols && dndxCols->size() == 0 ) {
debug() << "No dndx track found in event " << _nEvt << endmsg;
Ndndxtrk = 0;
}
else if ( dndxCols ) {
Ndndxtrk = dndxCols->size();
}
try {
tofCols = _inTofColHdl.get();
}
catch ( GaudiException &e ) {
debug() << "TofTrack collection " << _inTofColHdl.fullKey() << " is unavailable in event " << _nEvt << endmsg;
Ntoftrk = -1;
}
// Guard against a missing collection (tofCols stays nullptr if the get() above threw).
if ( tofCols && tofCols->size() == 0 ) {
debug() << "No tof track found in event " << _nEvt << endmsg;
Ntoftrk = 0;
}
else if ( tofCols ) {
Ntoftrk = tofCols->size();
}
if ( fultrkparassCols->size() == 0 ) {
debug() << "No full track particle association found in event " << _nEvt << endmsg;
Nfullass = 0;
m_tree->Fill();
_nEvt++;
return StatusCode::SUCCESS;
}
else{
Nfullass = fultrkparassCols->size();
}
info() << "normal eventID: " << _nEvt << endmsg;
for(auto track : *trkCol){
//truth association match
max_weight = -999.;
max_weight_idx = -1;
ass_idx = 0;
for (auto ass : *fultrkparassCols) {
if (ass.getRec() == track) {
weight = ass.getWeight();
if (weight > max_weight) {
max_weight = weight;
max_weight_idx = ass_idx;
}
}
ass_idx++;
}
truthidx.push_back(max_weight_idx);
// if (max_weight_idx < 0) continue;
p1=fultrkparassCols->at(max_weight_idx).getSim().getMomentum()[0];
p2=fultrkparassCols->at(max_weight_idx).getSim().getMomentum()[1];
p3=fultrkparassCols->at(max_weight_idx).getSim().getMomentum()[2];
genpx.push_back(p1);
genpy.push_back(p2);
genpz.push_back(p3);
genp.push_back(std::sqrt(p1*p1 + p2*p2 + p3*p3));
genE.push_back(fultrkparassCols->at(max_weight_idx).getSim().getEnergy());
genM.push_back(fultrkparassCols->at(max_weight_idx).getSim().getMass());
genphi.push_back(std::atan2(p2,p1));
gentheta.push_back(std::acos(p3/std::sqrt(p1*p1 + p2*p2 + p3*p3)));
x1=fultrkparassCols->at(max_weight_idx).getSim().getEndpoint()[0];
y1=fultrkparassCols->at(max_weight_idx).getSim().getEndpoint()[1];
endx.push_back(x1);
endy.push_back(y1);
endz.push_back(fultrkparassCols->at(max_weight_idx).getSim().getEndpoint()[2]);
endr.push_back(std::sqrt(x1*x1 + y1*y1));
PDG.push_back(fultrkparassCols->at(max_weight_idx).getSim().getPDG());
genstatus.push_back(fultrkparassCols->at(max_weight_idx).getSim().getGeneratorStatus());
simstatus.push_back(fultrkparassCols->at(max_weight_idx).getSim().getSimulatorStatus());
isdecayintrker.push_back(fultrkparassCols->at(max_weight_idx).getSim().isDecayedInTracker());//getSimulatorStatus().isDecayInTracker();
iscreatedinsim.push_back(fultrkparassCols->at(max_weight_idx).getSim().isCreatedInSimulation());
isbackscatter.push_back(fultrkparassCols->at(max_weight_idx).getSim().isBackscatter());
isstopped.push_back(fultrkparassCols->at(max_weight_idx).getSim().isStopped());
//find corresponding dndx track
edm4hep::RecDqdx dndxtrk;
dndx_index = -1;
matched1 = false;
for(int i=0; i<Ndndxtrk; i++){
if ( dndxCols->at(i).getTrack() == track ){
dndxtrk = dndxCols->at(i);
dndx_index = i;
matched1 = true;
break;
}
}
tpcidx.push_back(dndx_index);
tpc_chi2s_1.clear();tpc_expdndxs_1.clear();tpc_chis_1.clear();
tpcdndx=-1;tpcdndxerr=-1;
if( matched1 ) {
tpcdndx = dndxtrk.getDQdx().value;
tpcdndxerr = dndxtrk.getDQdx().error;
debug()<<"tpc_measdndx = "<<dndxtrk.getDQdx().value<<endmsg;
for (int idx=0;idx<5;idx++) {
double tpc_chi2 = dndxtrk.getHypotheses(idx).chi2;
double tpc_expdndx = dndxtrk.getHypotheses(idx).expected;
double tpc_chi = ( tpcdndx - tpc_expdndx ) / tpcdndxerr;
tpc_chi2s_1.push_back(tpc_chi2);
tpc_chis_1.push_back(tpc_chi);
tpc_expdndxs_1.push_back(tpc_expdndx);
debug()<<" idx : "<< idx <<" tpc_chi2 : "<< tpc_chi2 <<endmsg;
}
}
else{
tpc_chi2s_1={-999,-999,-999,-999,-999};
tpc_chis_1={-999,-999,-999,-999,-999};
tpc_expdndxs_1={-1,-1,-1,-1,-1};
}
tpc_measdndx.push_back(tpcdndx);
tpc_measdndxerr.push_back(tpcdndxerr);
matchedtpc.push_back(matched1);
tpc_chi2s.push_back(tpc_chi2s_1);
tpc_chis.push_back(tpc_chis_1);
tpc_expdndxs.push_back(tpc_expdndxs_1);
//find corresponding tof track
edm4hep::RecTof toftrk;
tof_index = -1;
matched2 = false;
for(int i=0; i<Ntoftrk; i++){
if ( tofCols->at(i).getTrack() == track ){
toftrk = tofCols->at(i);
tof_index = i;
matched2 = true;
break;
}
}
tofidx.push_back(tof_index);
tof_chi2s_1.clear();tof_expt_1.clear();tof_chis_1.clear();
toft=-1;tofterr=-1;
if ( matched2 ) {
toft = toftrk.getTime();
std::array<float, 5> tofexpts = toftrk.getTimeExp();
tofterr = toftrk.getSigma();
debug()<<"tof_meast = "<<toftrk.getTime()<<endmsg;
for (int idx=0;idx<5;idx++){
double tof_chi = ( toft - tofexpts[idx] ) / tofterr;
double tof_chi2 = tof_chi*tof_chi;
tof_chi2s_1.push_back(tof_chi2);
tof_chis_1.push_back(tof_chi);
tof_expt_1.push_back(tofexpts[idx]);
debug()<<" idx : "<< idx <<" tof_chi2 : "<< tof_chi2 <<endmsg;
}//end loop over masses
}
else{
tof_chi2s_1={-999,-999,-999,-999,-999};
tof_chis_1={-999,-999,-999,-999,-999};
tof_expt_1={-1,-1,-1,-1,-1};
}
tof_meast.push_back(toft);
tof_measterr.push_back(tofterr);
matchedtof.push_back(matched2);
tof_chi2s.push_back(tof_chi2s_1);
tof_chis.push_back(tof_chis_1);
tof_expt.push_back(tof_expt_1);
tot_chi2s_1.clear();
recpdg = 9999;tpcrecpdg = 9999;tofrecpdg = 9999;
if(matched1){
minchi2idx = std::distance(tpc_chi2s_1.begin(), std::min_element(tpc_chi2s_1.begin(), tpc_chi2s_1.end()));
tpcrecpdg = PDGIDs.at(minchi2idx);
}
if(matched2){
minchi2idx = std::distance(tof_chi2s_1.begin(), std::min_element(tof_chi2s_1.begin(), tof_chi2s_1.end()));
tofrecpdg = PDGIDs.at(minchi2idx);
}
if(matched1 || matched2){
if(matched1 && matched2){
for(int i=0;i<5;i++){
tot_chi2s_1.push_back(tpc_chi2s_1[i]+tof_chi2s_1[i]);
}
}
if(matched1 && !matched2) tot_chi2s_1=tpc_chi2s_1;
if(!matched1 && matched2) tot_chi2s_1=tof_chi2s_1;
minchi2idx = std::distance(tot_chi2s_1.begin(), std::min_element(tot_chi2s_1.begin(), tot_chi2s_1.end()));
recpdg = PDGIDs.at(minchi2idx);
}
else{
tot_chi2s_1={-999,-999,-999,-999,-999};
}
int charge = track.getTrackStates(1).omega/fabs(track.getTrackStates(1).omega);
tot_chi2s.push_back(tot_chi2s_1);
recoPDG.push_back(charge*recpdg);
tpcrecoPDG.push_back(charge*tpcrecpdg);
tofrecoPDG.push_back(charge*tofrecpdg);
}
m_tree->Fill();
_nEvt++;
return StatusCode::SUCCESS;
}// end execute
//------------------------------------------------------------------------------
StatusCode AnalysisPIDAlg::finalize(){
debug() << "Finalizing..." << endmsg;
m_file->cd();
m_tree->Write();
return StatusCode::SUCCESS;
}
#ifndef AnalysisPIDAlg_h
#define AnalysisPIDAlg_h 1
#include "k4FWCore/DataHandle.h"
#include "GaudiKernel/Algorithm.h"
#include "edm4hep/MCParticleCollection.h"
#include "edm4hep/SimTrackerHitCollection.h"
#include "edm4hep/TrackerHit.h"
#include "edm4hep/TrackCollection.h"
#include "edm4hep/TrackerHitCollection.h"
#include "edm4hep/MCRecoTrackerAssociationCollection.h"
#include "edm4hep/MCRecoTrackParticleAssociationCollection.h"
#include "edm4hep/RecDqdx.h"
#include "edm4hep/RecDqdxCollection.h"
#include "edm4cepc/RecTofCollection.h"
#include "edm4hep/TrackState.h"
#include "edm4hep/Vector3d.h"
#include "TFile.h"
#include "TTree.h"
#include <random>
#include "GaudiKernel/NTuple.h"
class AnalysisPIDAlg : public Algorithm {
public:
// Constructor of this form must be provided
AnalysisPIDAlg( const std::string& name, ISvcLocator* pSvcLocator );
// Three mandatory member functions of any algorithm
StatusCode initialize() override;
StatusCode execute() override;
StatusCode finalize() override;
private:
DataHandle<edm4hep::TrackCollection> _FultrkCol{"CompleteTracks", Gaudi::DataHandle::Reader, this};
DataHandle<edm4hep::MCRecoTrackParticleAssociationCollection> _FultrkParAssCol{"CompleteTracksParticleAssociation", Gaudi::DataHandle::Reader, this};
DataHandle<edm4hep::RecDqdxCollection> _inDndxColHdl{"DndxTracks", Gaudi::DataHandle::Reader, this};
DataHandle<edm4hep::RecTofCollection> _inTofColHdl{"RecTofCollection", Gaudi::DataHandle::Reader, this};
Gaudi::Property<std::string> m_outputFile{this, "OutputFile", "pid.root"};
std::vector<double> genpx, genpy, genpz, genE, genp, genM, gentheta, genphi, endx, endy, endz, endr;
std::vector<int> PDG, genstatus, simstatus, recoPDG, tpcrecoPDG, tofrecoPDG;
std::vector<bool> isdecayintrker, iscreatedinsim, isbackscatter, isstopped;
std::vector<bool> matchedtpc, matchedtof;
std::vector<int> truthidx, tpcidx, tofidx;
std::vector<std::vector<double>> tof_chi2s, tof_expt;
std::vector<std::vector<double>> tpc_chi2s, tpc_expdndxs;
std::vector<std::vector<double>> tof_chis, tpc_chis, tot_chi2s;
std::vector<double> tof_meast, tof_measterr;
std::vector<double> tpc_measdndx, tpc_measdndxerr;
std::vector<double> tof_chi2s_1, tof_expt_1;
std::vector<double> tpc_chi2s_1, tpc_expdndxs_1;
std::vector<double> tof_chis_1, tpc_chis_1, tot_chi2s_1;
int _nEvt;
const std::map<int, int> PDGIDs = {
{0, -11},
{1, -13},
{2, 211},
{3, 321},
{4, 2212},
};
double tpcdndx, tpcdndxerr, toft, tofterr;
int Ndndxtrk, Ntoftrk, Nfulltrk, Nfullass;
double max_weight, weight;
int max_weight_idx, ass_idx, dndx_index, tof_index;
double p1, p2, p3, x1, y1;
bool matched1, matched2;
int recpdg, tpcrecpdg, tofrecpdg, minchi2idx;
TFile* m_file;
TTree* m_tree;
void ClearVars(){
Ndndxtrk = 0;
Ntoftrk = 0;
Nfullass = 0;
Nfulltrk = 0;
genpx.clear(); genpy.clear(); genpz.clear(); genE.clear();
genp.clear(); genM.clear(); gentheta.clear(); genphi.clear();
endx.clear(); endy.clear(); endz.clear(); endr.clear();
PDG.clear(); genstatus.clear(); simstatus.clear();
recoPDG.clear(); tpcrecoPDG.clear(); tofrecoPDG.clear();
isdecayintrker.clear(); iscreatedinsim.clear();
isbackscatter.clear(); isstopped.clear();
tof_chi2s.clear();
tof_chis.clear();
tof_meast.clear();
tof_measterr.clear();
tof_expt.clear();
tpc_chi2s.clear();
tpc_chis.clear();
tpc_expdndxs.clear();
tpc_measdndx.clear();
tpc_measdndxerr.clear();
tot_chi2s.clear();
matchedtpc.clear();
matchedtof.clear();
truthidx.clear();
tpcidx.clear();
tofidx.clear();
}
};
#endif
add_subdirectory(TotalInvMass)
add_subdirectory(TrackInspect)
add_subdirectory(DumpEvent)
add_subdirectory(ReadDigi)
add_subdirectory(JetClustering)
add_subdirectory(GenMatch)
add_subdirectory(AnalysisPID)
gaudi_add_module(DumpEvent
SOURCES src/DumpMCParticleAlg.cpp
src/DumpSimHitAlg.cpp
#src/DumpHitAlg.cpp
src/DumpTrackAlg.cpp
#src/DumpCalorimeterAlg.cpp
LINK DataHelperLib
Gaudi::GaudiKernel
EDM4HEP::edm4hep
${ROOT_LIBRARIES}
${CLHEP_LIBRARIES}
${DD4hep_COMPONENT_LIBRARIES}
DetInterface
k4FWCore::k4FWCore
)
install(TARGETS DumpEvent
EXPORT CEPCSWTargets
RUNTIME DESTINATION "${CMAKE_INSTALL_BINDIR}" COMPONENT bin
LIBRARY DESTINATION "${CMAKE_INSTALL_LIBDIR}" COMPONENT shlib
COMPONENT dev)
#include "DumpMCParticleAlg.h"
#include "GaudiKernel/DataObject.h"
#include "GaudiKernel/IHistogramSvc.h"
#include "GaudiKernel/MsgStream.h"
#include "GaudiKernel/SmartDataPtr.h"
#include "DetInterface/IGeomSvc.h"
#include "DataHelper/HelixClass.h"
#include "DD4hep/Detector.h"
#include "DD4hep/DD4hepUnits.h"
#include "CLHEP/Units/SystemOfUnits.h"
#include <math.h>
DECLARE_COMPONENT( DumpMCParticleAlg )
//------------------------------------------------------------------------------
DumpMCParticleAlg::DumpMCParticleAlg( const std::string& name, ISvcLocator* pSvcLocator )
: Algorithm( name, pSvcLocator ) {
declareProperty("MCParticleCollection", _inMCColHdl, "Handle of the Input MCParticle collection");
m_thisName = name;
}
//------------------------------------------------------------------------------
StatusCode DumpMCParticleAlg::initialize(){
info() << "Booking Ntuple" << endmsg;
NTuplePtr nt1(ntupleSvc(), "MyTuples/MC");
if ( !nt1 ) {
m_tuple = ntupleSvc()->book("MyTuples/MC",CLID_ColumnWiseTuple,"MC truth");
if ( 0 != m_tuple ) {
m_tuple->addItem ("nmc", m_nParticles, 0, 1000 ).ignore();
m_tuple->addIndexedItem ("pdg", m_nParticles, m_pdgID ).ignore();
m_tuple->addIndexedItem ("genStatus", m_nParticles, m_genStatus ).ignore();
m_tuple->addIndexedItem ("simStatus", m_nParticles, m_simStatus ).ignore();
m_tuple->addIndexedItem ("charge", m_nParticles, m_charge ).ignore();
m_tuple->addIndexedItem ("time", m_nParticles, m_time ).ignore();
m_tuple->addIndexedItem ("mass", m_nParticles, m_mass ).ignore();
m_tuple->addIndexedItem ("vx", m_nParticles, m_vx ).ignore();
m_tuple->addIndexedItem ("vy", m_nParticles, m_vy ).ignore();
m_tuple->addIndexedItem ("vz", m_nParticles, m_vz ).ignore();
m_tuple->addIndexedItem ("px", m_nParticles, m_px ).ignore();
m_tuple->addIndexedItem ("py", m_nParticles, m_py ).ignore();
m_tuple->addIndexedItem ("pz", m_nParticles, m_pz ).ignore();
m_tuple->addIndexedItem ("d0", m_nParticles, m_d0 ).ignore();
m_tuple->addIndexedItem ("phi0", m_nParticles, m_phi0 ).ignore();
m_tuple->addIndexedItem ("omega", m_nParticles, m_omega ).ignore();
m_tuple->addIndexedItem ("z0", m_nParticles, m_z0 ).ignore();
m_tuple->addIndexedItem ("tanLambda", m_nParticles, m_tanLambda ).ignore();
}
else { // did not manage to book the N tuple....
fatal() << "Cannot bool MyTuples/MC " << endmsg;
return StatusCode::FAILURE;
}
}
else{
m_tuple = nt1;
}
auto geomSvc = service<IGeomSvc>("GeomSvc");
if(geomSvc){
const dd4hep::Direction& field = geomSvc->lcdd()->field().magneticField(dd4hep::Position(0,0,0));
m_field = field.z()/dd4hep::tesla;
info() << "Magnetic field will obtain from GeomSvc = " << m_field << " tesla" << endmsg;
}
else{
info() << "Failed to find GeomSvc ..." << endmsg;
info() << "Magnetic field will use what input through python option for this algorithm namse as Field, now " << m_field << " tesla" << endmsg;
}
_nEvt = 0;
return StatusCode::SUCCESS;
}
//------------------------------------------------------------------------------
StatusCode DumpMCParticleAlg::execute(){
const edm4hep::MCParticleCollection* mcCols = nullptr;
try {
mcCols = _inMCColHdl.get();
}
catch ( GaudiException &e ) {
debug() << "Collection " << _inMCColHdl.fullKey() << " is unavailable in event " << _nEvt << endmsg;
}
if(mcCols){
m_nParticles = 0;
for(auto particle : *mcCols){
m_pdgID[m_nParticles] = particle.getPDG();
m_genStatus[m_nParticles] = particle.getGeneratorStatus();
m_simStatus[m_nParticles] = particle.getSimulatorStatus();
m_charge[m_nParticles] = particle.getCharge();
m_time[m_nParticles] = particle.getTime();
m_mass[m_nParticles] = particle.getMass();
const auto& vertex = particle.getVertex();
m_vx[m_nParticles] = vertex.x;
m_vy[m_nParticles] = vertex.y;
m_vz[m_nParticles] = vertex.z;
const auto& momentum = particle.getMomentum();
m_px[m_nParticles] = momentum.x;
m_py[m_nParticles] = momentum.y;
m_pz[m_nParticles] = momentum.z;
HelixClass helix;
float posV[3] = {vertex.x,vertex.y,vertex.z};
float momV[3] = {momentum.x,momentum.y,momentum.z};
helix.Initialize_VP(posV,momV,particle.getCharge(),m_field);
float phiMC = helix.getPhi0();
if(phiMC>CLHEP::pi) phiMC = phiMC - CLHEP::twopi;
m_phi0[m_nParticles] = phiMC;
m_d0[m_nParticles] = helix.getD0();
m_omega[m_nParticles] = helix.getOmega();
m_z0[m_nParticles] = helix.getZ0();
m_tanLambda[m_nParticles] = helix.getTanLambda();
m_nParticles++;
}
debug() << "MCParticle: " << m_nParticles <<endmsg;
}
m_tuple->write();
_nEvt++;
return StatusCode::SUCCESS;
}
//------------------------------------------------------------------------------
StatusCode DumpMCParticleAlg::finalize(){
debug() << "Finalizing..." << endmsg;
return StatusCode::SUCCESS;
}
#ifndef DumpMCParticleAlg_h
#define DumpMCParticleAlg_h 1
#include "k4FWCore/DataHandle.h"
#include "GaudiKernel/Algorithm.h"
#include "edm4hep/MCParticleCollection.h"
#include "edm4hep/TrackCollection.h"
#include "GaudiKernel/NTuple.h"
class DumpMCParticleAlg : public Algorithm {
public:
// Constructor of this form must be provided
DumpMCParticleAlg( const std::string& name, ISvcLocator* pSvcLocator );
// Three mandatory member functions of any algorithm
StatusCode initialize() override;
StatusCode execute() override;
StatusCode finalize() override;
private:
DataHandle<edm4hep::MCParticleCollection> _inMCColHdl{"MCParticle", Gaudi::DataHandle::Reader, this};
Gaudi::Property<double> m_field{this, "Field", 3.0};
NTuple::Tuple* m_tuple;
NTuple::Item<long> m_nParticles;
NTuple::Array<int> m_pdgID;
NTuple::Array<int> m_genStatus;
NTuple::Array<int> m_simStatus;
NTuple::Array<float> m_charge;
NTuple::Array<float> m_time;
NTuple::Array<double> m_mass;
NTuple::Array<double> m_vx;
NTuple::Array<double> m_vy;
NTuple::Array<double> m_vz;
NTuple::Array<float> m_px;
NTuple::Array<float> m_py;
NTuple::Array<float> m_pz;
NTuple::Array<float> m_d0;
NTuple::Array<float> m_phi0;
NTuple::Array<float> m_omega;
NTuple::Array<float> m_z0;
NTuple::Array<float> m_tanLambda;
int _nEvt;
std::string m_thisName;
};
#endif
/*
* Description:
* Dump the simulated information.
*
* Author:
* Tao Lin <lintao AT ihep.ac.cn>
*/
#include "k4FWCore/DataHandle.h"
#include "GaudiKernel/Algorithm.h"
#include "edm4hep/MCParticleCollection.h"
#include "edm4hep/SimTrackerHitCollection.h"
#include "edm4hep/SimCalorimeterHitCollection.h"
#include "edm4hep/CaloHitContributionCollection.h"
#include "GaudiKernel/NTuple.h"
class DumpSimHitAlg: public Algorithm {
public:
DumpSimHitAlg(const std::string& name, ISvcLocator* pSvcLocator);
// Three mandatory member functions of any algorithm
StatusCode initialize() override;
StatusCode execute() override;
StatusCode finalize() override;
private:
// - collection MCParticleG4: the simulated particles in Geant4
DataHandle<edm4hep::MCParticleCollection> m_mcParCol{"MCParticle",
Gaudi::DataHandle::Reader, this};
// Dedicated collections for CEPC
DataHandle<edm4hep::SimTrackerHitCollection> m_VXDCol{"VXDCollection",
Gaudi::DataHandle::Reader, this};
};
DECLARE_COMPONENT( DumpSimHitAlg )
DumpSimHitAlg::DumpSimHitAlg(const std::string& name, ISvcLocator* pSvcLocator)
: Algorithm(name, pSvcLocator) {
}
StatusCode DumpSimHitAlg::initialize() {
return StatusCode::SUCCESS;
}
StatusCode DumpSimHitAlg::execute() {
auto mcCol = m_mcParCol.get();
for (auto particle: *mcCol) {
info() << "mc particle -> "
<< " (ID: " << particle.getObjectID().index << ") "
<< " (simulator status: " << particle.getSimulatorStatus() << ") "
<< endmsg;
}
auto vxdCol = m_VXDCol.get();
for (auto hit: *vxdCol) {
auto mcparticle = hit.getMCParticle();
if (mcparticle.getGeneratorStatus() != 1) {
error() << "Found generator status is not 1 for hit. " << endmsg;
}
info() << "hit -> "
<< " mcparticle ("
<< " ID: " << mcparticle.getObjectID().index << "; "
<< " generator status: " << mcparticle.getGeneratorStatus() << "; "
<< " simulator status: " << mcparticle.getSimulatorStatus() << ") "
<< endmsg;
}
return StatusCode::SUCCESS;
}
StatusCode DumpSimHitAlg::finalize() {
return StatusCode::SUCCESS;
}