Difference: SiteDefCream2nd (5 vs. 6)

Revision 6 - 2013-03-28 - AlessandroPaolini

###################################
# General configuration variables #
###################################

# List of the batch nodes hostnames and optionally the subcluster ID the
# WN belongs to. An example file is available in 
# ${INSTALL_ROOT}/glite/yaim/examples/wn-list.conf
# Change the path according to your site settings.
#WN_LIST=${INSTALL_ROOT}/glite/yaim/examples/wn-list.conf
WN_LIST=/root/wn-list.conf
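# Illustrative sketch only (not part of the original file): each line of
# wn-list.conf is expected to carry one WN hostname, e.g.
#   wn-01.cnaf.infn.it
#   wn-02.cnaf.infn.it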

# List of unix users to be created in the service nodes.
# The format is as follows:
# UID:LOGIN:GID1,GID2,...:GROUP1,GROUP2,...:VO:FLAG:
# An example file is available in ${INSTALL_ROOT}/glite/yaim/examples/ig-users.conf
# Change the path according to your site settings.
# For more information please check ${INSTALL_ROOT}/glite/yaim/examples/users.conf.README 
USERS_CONF=${YAIM_ROOT}/glite/yaim/examples/ig-users.conf
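# Illustrative example of a users.conf entry following the format above
# (UID, login, GID, group and VO are assumed values, not taken from this site):
#   20001:dteam001:20001:dteam:dteam::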

# List of the local accounts which a user should be mapped to.
# The format is as follows:
# "VOMS_FQAN":GROUP:GID:FLAG:[VO]
# An example file is available in ${INSTALL_ROOT}/glite/yaim/examples/ig-groups.conf
# Change the path according to your site settings.
# For more information please check ${INSTALL_ROOT}/glite/yaim/examples/groups.conf.README
# NOTE: comment out this variable if you want to specify a groups.conf per VO
# under the group.d/ directory.
GROUPS_CONF=${YAIM_ROOT}/glite/yaim/examples/ig-groups.conf
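# Illustrative example of groups.conf entries following the format above
# (FQANs and flags are assumed values, not taken from this site):
#   "/dteam/ROLE=lcgadmin":::sgm:
#   "/dteam"::::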

# Uncomment this variable if you want to specify a local groups.conf 
# It is similar to GROUPS_CONF but used to specify a separate file
# where local accounts specific to the site are defined.
# LOCAL_GROUPS_CONF=my_local_groups.conf

# Uncomment this variable if you are installing a mysql server
# It is the MySQL admin password. 
MYSQL_PASSWORD=infncnaf

# Uncomment this variable if you want to explicitly use pool
# accounts for special users when generating the grid-mapfile.
# If not defined, YAIM will decide whether to use special 
# pool accounts or not automatically
# SPECIAL_POOL_ACCOUNTS=yes or no

# INFN-GRID: Keep in site-info.def
# WARNING! This variable will be removed in a future release
JAVA_LOCATION="/usr/java/latest"


################################
# Site configuration variables #
################################

# Human-readable name of your site
SITE_NAME=INFN-CNAF


#########################################
# ARGUS authorisation framework control #
#########################################

# Set USE_ARGUS to yes to enable the configuration of ARGUS
USE_ARGUS=yes

# In case ARGUS is to be used the following should be set
# The ARGUS service PEPD endpoints as a space separated list:
#ARGUS_PEPD_ENDPOINTS="http://pepd.example.org:8154/authz"
ARGUS_PEPD_ENDPOINTS="https://vgrid06.cnaf.infn.it:8154/authz"

# ARGUS resource identities: The resource ID can be set
# for the cream CE, WMS and other nodes respectively.
# If a resource ID is left unset the ARGUS configuration
# will be skipped on the associated node.
# CREAM_PEPC_RESOURCEID=urn:mysitename.org:resource:ce
# WMS_PEPC_RESOURCEID=urn:mysitename.org:resource:wms
# GENERAL_PEPC_RESOURCEID=urn:mysitename.org:resource:other
CREAM_PEPC_RESOURCEID="http://cnaf.infn.it/cremino"

# INFN-GRID: space separated list of the IP addresses of the NTP servers
#            (preferably set a local ntp server and a public one, ex. pool.ntp.org)
#NTP_HOSTS_IP="131.154.1.53 131.154.1.103 pool.ntp.org"
NTP_HOSTS_IP="ripe.cnaf.infn.it ntp-3.infn.it ntp-1.infn.it"

#--------------------------#
# Private network settings #
#--------------------------#

# INFN-GRID: Set PRIVATE_NETWORK=true to use WN on private network
PRIVATE_NETWORK=false
# INFN-GRID: If PRIVATE_NETWORK=true, uncomment and write the internal domain name 
#MY_INT_DOMAIN=your_internal_domain
# INFN-GRID: If PRIVATE_NETWORK=true, uncomment and write your internal network  
#INT_NET=your_internal_network
# INFN-GRID: If PRIVATE_NETWORK=true, uncomment and write the internal FQDN hostname of CE 
#CE_INT_HOST=your_internal_ce_host.$MY_INT_DOMAIN
# INFN-GRID: If PRIVATE_NETWORK=true, uncomment and write the internal hostname
#            of the host exporting the directory used to install the application software
#INT_HOST_SW_DIR=$CE_INT_HOST


##############################
# CE configuration variables #
##############################

# Hostname of the CE
CE_HOST=cremoso.$MY_DOMAIN

############################
# SubCluster configuration #
############################

# Name of the processor model as defined by the vendor 
# for the Worker Nodes in a SubCluster.
CE_CPU_MODEL=Opteron

# Name of the processor vendor 
# for the Worker Nodes in a SubCluster
CE_CPU_VENDOR=AuthenticAMD

# Processor clock speed expressed in MHz 
# for the Worker Nodes in a SubCluster.
CE_CPU_SPEED=2589

# For the following variables please check:
# https://wiki.egi.eu/wiki/Operations/HOWTO05
#
# Operating system name used on the Worker Nodes 
# part of the SubCluster.
CE_OS="ScientificSL"

# Operating system release used on the Worker Nodes
# part of the SubCluster.
CE_OS_RELEASE="5.5"

# Operating system version used on the Worker Nodes
# part of the SubCluster.
CE_OS_VERSION="Boron"

# Platform Type of the WN in the SubCluster
# Check: https://wiki.egi.eu/wiki/Operations/HOWTO06 
CE_OS_ARCH=x86_64

# Total physical memory of a WN in the SubCluster
# expressed in Megabytes.
CE_MINPHYSMEM=4096

# Total virtual memory of a WN in the SubCluster
# expressed in Megabytes.
CE_MINVIRTMEM=1024

# Total number of real CPUs/physical chips in 
# the SubCluster, including the nodes part of the 
# SubCluster that are temporarily down or offline.
CE_PHYSCPU=0

# Total number of cores/hyperthreaded CPUs in 
# the SubCluster, including the nodes part of the 
# SubCluster that are temporarily down or offline
CE_LOGCPU=0

# Number of Logical CPUs (cores) of the WN in the 
# SubCluster
CE_SMPSIZE=2

# Performance index of your fabric in SpecInt 2000
CE_SI00=1039

# Performance index of your fabric in SpecFloat 2000
CE_SF00=951

# Set this variable to either TRUE or FALSE to express 
# the permission for direct outbound connectivity 
# for the WNs in the SubCluster
CE_OUTBOUNDIP=TRUE

# Set this variable to either TRUE or FALSE to express 
# the permission for inbound connectivity 
# for the WNs in the SubCluster
CE_INBOUNDIP=FALSE

# Space separated list of software tags supported by the site
# e.g. CE_RUNTIMEENV="LCG-2 LCG-2_1_0 LCG-2_1_1 LCG-2_2_0 GLITE-3_0_0 GLITE-3_1_0 R-GMA"
# INFN-GRID: Add the following tags to runtime environment:
#            * If your site belongs to INFN write CITY in capitals (ex. PADOVA)
#              otherwise INSTITUTE-CITY in capitals (ex. SNS-PISA)
#            * Write the average value of SpecInt2000 and SpecFloat2000 for your WNs;
#              please note that now a '_' is used as separator in place of '='
#              (see at http://grid-it.cnaf.infn.it/fileadmin/Certification/MetricHowTo.pdf)
#              SI00MeanPerCPU_<your_value>
#              SF00MeanPerCPU_<your_value>
#CE_RUNTIMEENV="tag1 [tag2 [...]]"
CE_RUNTIMEENV="
    CNAF
    SI00MeanPerCPU_1039
    SF00MeanPerCPU_951
    GLITE-3_0_0
    GLITE-3_1_0
    GLITE-3_2_0
"


# For the following variables, please check more detailed information in:
# https://twiki.cern.ch/twiki/bin/view/LCG/Site-info_configuration_variables#site_info_def
#
# The following values must be defined by the sys admin:
# - CPUScalingReferenceSI00=<referenceCPU-SI00> 
# - Share=<vo-name>:<vo-share> (optional, multiple definitions) 
#CE_CAPABILITY="CPUScalingReferenceSI00=value [Share=vo-name1:value [Share=vo-name2:value [...]]]"
CE_CAPABILITY="CPUScalingReferenceSI00=1039 glexec"
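# Illustrative only: the same variable with the optional per-VO shares added
# (the VO names and share values below are assumptions, not this site's settings):
#CE_CAPABILITY="CPUScalingReferenceSI00=1039 Share=atlas:60 Share=cms:40 glexec"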

# The following values must be defined by the sys admin:
# - Cores=value
# - value-HEP-SPEC06 (optional), where value is the CPU power computed
#   using the HEP-SPEC06 benchmark
#CE_OTHERDESCR="Cores=value[,Benchmark=value-HEP-SPEC06]" 
CE_OTHERDESCR="Cores=1,Benchmark=4.156-HEP-SPEC06"


########################################
# Batch server configuration variables #
########################################

#Set it to 'no' if you want to disable the maui configuration in YAIM 
#Default value: yes
Changed:
< CONFIG_MAUI="no"
> #CONFIG_MAUI="yes"
#Set it to 'no' if you want to disable the /var/torque/server_priv/nodes configuration in YAIM
#Default value: yes
Changed:
< CONFIG_TORQUE_NODES="no"
> #CONFIG_TORQUE_NODES="yes"
# Hostname of the Batch server
# Change this if your batch server is not installed
# in the same host of the CE
# INFN-GRID: Set this variable to the hostname of your batch master server,
#            which could be different from the current CE (for example in the case of
#            sites with two CEs). Do not comment it out!
BATCH_SERVER=cremino.cnaf.infn.it

# Jobmanager specific settings. Please define one of:
# lcgpbs, lcglsf, lcgsge or lcgcondor
# INFN-GRID: for a CREAM CE write it without the 'lcg' prefix: pbs, lsf, sge, condor
JOB_MANAGER=pbs

# torque, lsf, sge or condor
# INFN-GRID: write here 'pbs' even if you are using torque (for compatibility
#            with MPI support on Globus); write 'lsf' if you are using LSF.
CE_BATCH_SYS=pbs

# INFN-GRID: only for torque/pbs; path for batch-system log files
# BATCH_LOG_DIR=/var/torque/
BATCH_LOG_DIR=/var/torque/

BATCH_VERSION=2.5.7

# INFN-GRID: only for LSF; path for the batch-system bin files
#BATCH_BIN_DIR=my_batch_system_bin_dir

# INFN-GRID: only for LSF; path where lsf.conf is located
#BATCH_CONF_DIR=lsf_install_path/conf

# Path of a file containing the munge key. Munge is required since Torque version 2.5.7.
# This file will be copied to /etc/munge/munge.key
MUNGE_KEY_FILE=/etc/munge/munge.key
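# A minimal sketch (not from the original file) of how the munge key is
# typically generated on this host and propagated to the WNs; the WN hostname
# below is an assumption:
#   /usr/sbin/create-munge-key                        # provided by the munge package
#   scp /etc/munge/munge.key root@wn-01.cnaf.infn.it:/etc/munge/munge.key
#   ssh root@wn-01.cnaf.infn.it 'chown munge:munge /etc/munge/munge.key; chmod 400 /etc/munge/munge.key; service munge restart'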

################################
# APEL configuration variables #
################################

# Database password for the APEL DB.
APEL_DB_PASSWORD=macchevvoi

MON_HOST=$CE_HOST

########################
# SE general variables #
########################

STORM_HOST=sunstorm.$MY_DOMAIN
STORM2_HOST=darkstorm.$MY_DOMAIN

# Space separated list of SE hostnames
SE_LIST="$STORM_HOST $STORM2_HOST"

# Space separated list of SE hosts from SE_LIST containing
# the export directory from the Storage Element and the
# mount directory common to the worker nodes that are part
# of the Computing Element. If any of the SEs in SE_LIST
# does not support the mount concept, do not define
# anything for that SE in this variable. If this is the case
# for all the SEs in SE_LIST then put the value "none".
#SE_MOUNT_INFO_LIST="[SE1:export_dir1,mount_dir1 [SE2:export_dir2,mount_dir2 [...]]|none]"
SE_MOUNT_INFO_LIST="none"
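# Illustrative only: the syntax for a single SE that does support the mount
# concept (the export and mount paths below are assumptions, not this site's):
#SE_MOUNT_INFO_LIST="$STORM_HOST:/storage/export,/storage/mount"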

################################
# BDII configuration variables #
################################

# Hostname of the top level BDII
# INFN-GRID: Default BDII Top for Italy (if you do not have your own)
BDII_HOST=egee-bdii.cnaf.infn.it

##############################
# VO configuration variables #
##############################
# If you are configuring a DNS-like VO, please check
# the following URL: https://twiki.cern.ch/twiki/bin/view/LCG/YaimGuide400#vo_d_directory

# Space separated list of VOs supported by your site
# INFN-GRID: Available VOs (alphabetically sorted - count 44):
#            alice ams02.cern.ch argo atlas babar
#            bio biomed cdf cms compassit compchem cyclops
#            comput-er.it d4science.research-infrastructures.eu dteam enea
#            enmr.eu esr eticsproject.eu euasia euchina euindia
#            eumed gilda geant4 gear glast.org
#            gridit inaf infngrid lhcb
#            libi lights.infn.it magic omiieurope ops
#            pacs.infn.it pamela planck superbvo.org theophys
#            tps.infn.it virgo zeus
# INFN-GRID: Your site must support the following certification VOs:
#            dteam infngrid ops
#VOS="vo1 [vo2 [...]]"
VOS="comput-er.it dteam glast.org infngrid ops gridit enmr.eu"

# Prefix of the experiment software directory in your CE
# INFN-GRID: Agreement on software directory name
VO_SW_DIR=/opt/exp_soft

# Space separated list of queues configured in your CE
# INFN-GRID: Your site must configure a queue that supports the certification VOs
#QUEUES="q1 [q2 [...]]"
QUEUES="cert prod cloudtf"

# For each queue defined in QUEUES, define a _GROUP_ENABLE variable
# which is a space separated list of VO names and VOMS FQANs:
# Ex.: MYQUEUE_GROUP_ENABLE="ops atlas cms /cms/Higgs /cms/ROLE=production"
# In queue names containing dots and dashes, replace them with underscores:
# Ex.: QUEUES="my.test-queue"
#      MY_TEST_QUEUE_GROUP_ENABLE="ops atlas"
# INFN-GRID: Here is an example of the settings for the certification queue:
#            CERT_GROUP_ENABLE="dteam infngrid ops"
#<queue-name>_GROUP_ENABLE="fqan1 [fqan2 [...]]"

CERT_GROUP_ENABLE="dteam infngrid ops /dteam/ROLE=lcgadmin /dteam/ROLE=production /ops/ROLE=lcgadmin /ops/ROLE=pilot /infngrid/ROLE=SoftwareManager /infngrid/ROLE=pilot"
PROD_GROUP_ENABLE="comput-er.it gridit glast.org /comput-er.it/ROLE=SoftwareManager /gridit/ROLE=SoftwareManager /glast.org/ROLE=SoftwareManager /glast.org/ROLE=prod"
CLOUDTF_GROUP_ENABLE="dteam /dteam/ROLE=lcgadmin /dteam/ROLE=production enmr.eu /enmr.eu/ROLE=SoftwareManager"

# Optional variable to define the default SE used by the VO.
# Define the SE hostname if you want a specific SE to be the default one.
# If this variable is not defined, the first SE in SE_LIST will be used
# as the default one.
# VO_<vo-name>_DEFAULT_SE=vo-default-se

# Optional variable to define a list of LBs used by the VO.
# Define a space separated list of LB hostnames.
# If this variable is not defined LB_HOST will be used.
# VO_<vo-name>_LB_HOSTS="vo-lb1 [vo-lb2 [...]]"

# Optional variable to automatically add wildcards per FQAN
# in the LCMAPS gridmap file and groupmap file. Set it to 'yes'
# if you want to add the wildcards in your VO. Do not define it
# or set it to 'no' if you do not want to configure wildcards in your VO.
# VO_<vo-name>_MAP_WILDCARDS=no

# Optional variable to define the Myproxy server supported by the VO.
# Define the Myproxy hostname if you want a specific Myproxy server.
# If this variable is not defined PX_HOST will be used.
# VO_<vo-name>_PX_HOST=vo-myproxy

# Optional variable to define a list of RBs used by the VO.
# Define a space separated list of RB hostnames.
# If this variable is not defined RB_HOST will be used.
# VO_<vo-name>_RBS="vo-rb1 [vo-rb2 [...]]"

# Area on the WN for the installation of the experiment software.
# If on the WNs a predefined shared area has been mounted where
# VO managers can pre-install software, then this variable
# should point to this area. If instead there is no shared
# area and each job must install the software, then this variable
# should contain a dot ( . ). In any case, the mounting of shared areas,
# as well as the local installation of VO software, is not managed
# by yaim and should be handled locally by Site Administrators.
# VO_<vo-name>_SW_DIR=wn_exp_soft_dir

# This variable contains the vomses file parameters needed
# to contact a VOMS server. Multiple VOMS servers can be given
# if the parameters are enclosed in single quotes.
# VO_<vo-name>_VOMSES="'vo_name voms_server_hostname port voms_server_host_cert_dn vo_name' ['...']"

# DN of the CA that signs the VOMS server certificate.
# Multiple values can be given if enclosed in single quotes.
# Note that there must be as many entries as in the VO_<vo-name>_VOMSES variable.
# There is a one to one relationship between the elements of both lists,
# so the order must be respected.
# VO_<vo-name>_VOMS_CA_DN="'voms_server_ca_dn' ['...']"

# A list of the VOMS servers used to create the DN grid-map file.
# Multiple values can be given if enclosed in single quotes.
# VO_<vo-name>_VOMS_SERVERS="'vomss://<host-name>:8443/voms/<vo-name>?/<vo-name>' ['...']"

# Optional variable to define a list of WMSs used by the VO.
# Define a space separated list of WMS hostnames.
# If this variable is not defined WMS_HOST will be used.
# VO_<vo-name>_WMS_HOSTS="vo-wms1 [vo-wms2 [...]]"

# Optional variable to create a grid-mapfile with mappings to ordinary
# pool accounts, not containing mappings to special users.
# - UNPRIVILEGED_MKGRIDMAP=no or undefined: the grid-mapfile will contain
#   special users if defined in groups.conf
# - UNPRIVILEGED_MKGRIDMAP=yes: a grid-mapfile will be created
#   containing only mappings to ordinary pool accounts.
# VO_<vo-name>_UNPRIVILEGED_MKGRIDMAP=no

# gLite pool account home directory for the user accounts specified in USERS_CONF.
# Define this variable if you would like to use a directory different from /home.
# VO_<vo-name>_USER_HOME_PREFIX=/pool_account_home_dir

# Examples for the following VOs are included below:
#
# atlas
# alice
# lhcb
# cms
# dteam
# biomed
# ops
#
# VOs should check the CIC portal http://cic.in2p3.fr for the VO ID card information
#

# INFN-GRID: aux variable: default SE per VO
#SE_HOST=my-close-se.${MY_DOMAIN}
SE_HOST=$STORM2_HOST

#########
# atlas #
#########
VO_ATLAS_SW_DIR=$VO_SW_DIR/atlas
VO_ATLAS_DEFAULT_SE=$SE_HOST
VO_ATLAS_STORAGE_DIR=$CLASSIC_STORAGE_DIR/atlas
VO_ATLAS_VOMS_SERVERS='vomss://voms.cern.ch:8443/voms/atlas?/atlas/'
# INFN-GRID: added BNL replica
VO_ATLAS_VOMSES="'atlas lcg-voms.cern.ch 15001 /DC=ch/DC=cern/OU=computers/CN=lcg-voms.cern.ch atlas 24' 'atlas voms.cern.ch 15001 /DC=ch/DC=cern/OU=computers/CN=voms.cern.ch atlas 24' 'atlas vo.racf.bnl.gov 15003 /DC=org/DC=doegrids/OU=Services/CN=vo.racf.bnl.gov atlas 24'"
# INFN-GRID: added BNL replica
VO_ATLAS_VOMS_CA_DN="'/DC=ch/DC=cern/CN=CERN Trusted Certification Authority' '/DC=ch/DC=cern/CN=CERN Trusted Certification Authority' '/DC=org/DC=DOEGrids/OU=Certificate Authorities/CN=DOEGrids CA 1'"
VO_ATLAS_RBS="atlasrb1.cern.ch atlasrb2.cern.ch"

#########
# alice #
#########
VO_ALICE_SW_DIR=$VO_SW_DIR/alice
VO_ALICE_DEFAULT_SE=$SE_HOST
VO_ALICE_STORAGE_DIR=$CLASSIC_STORAGE_DIR/alice
VO_ALICE_VOMS_SERVERS='vomss://voms.cern.ch:8443/voms/alice?/alice/'
VO_ALICE_VOMSES="'alice lcg-voms.cern.ch 15000 /DC=ch/DC=cern/OU=computers/CN=lcg-voms.cern.ch alice 24' 'alice voms.cern.ch 15000 /DC=ch/DC=cern/OU=computers/CN=voms.cern.ch alice 24'"
VO_ALICE_VOMS_CA_DN="'/DC=ch/DC=cern/CN=CERN Trusted Certification Authority' '/DC=ch/DC=cern/CN=CERN Trusted Certification Authority'"

#######
# cms #
#######
VO_CMS_SW_DIR=$VO_SW_DIR/cms
VO_CMS_DEFAULT_SE=$SE_HOST
VO_CMS_STORAGE_DIR=$CLASSIC_STORAGE_DIR/cms
VO_CMS_VOMS_SERVERS='vomss://voms.cern.ch:8443/voms/cms?/cms/'
VO_CMS_VOMSES="'cms lcg-voms.cern.ch 15002 /DC=ch/DC=cern/OU=computers/CN=lcg-voms.cern.ch cms 24' 'cms voms.cern.ch 15002 /DC=ch/DC=cern/OU=computers/CN=voms.cern.ch cms 24'"
VO_CMS_VOMS_CA_DN="'/DC=ch/DC=cern/CN=CERN Trusted Certification Authority' '/DC=ch/DC=cern/CN=CERN Trusted Certification Authority'"

########
# lhcb #
########
VO_LHCB_SW_DIR=$VO_SW_DIR/lhcb
VO_LHCB_DEFAULT_SE=$SE_HOST
VO_LHCB_STORAGE_DIR=$CLASSIC_STORAGE_DIR/lhcb
VO_LHCB_VOMS_SERVERS='vomss://voms.cern.ch:8443/voms/lhcb?/lhcb/'
VO_LHCB_VOMSES="'lhcb lcg-voms.cern.ch 15003 /DC=ch/DC=cern/OU=computers/CN=lcg-voms.cern.ch lhcb 24' 'lhcb voms.cern.ch 15003 /DC=ch/DC=cern/OU=computers/CN=voms.cern.ch lhcb 24'"
VO_LHCB_VOMS_CA_DN="'/DC=ch/DC=cern/CN=CERN Trusted Certification Authority' '/DC=ch/DC=cern/CN=CERN Trusted Certification Authority'"

#########
# dteam #
#########
VO_DTEAM_SW_DIR=$VO_SW_DIR/dteam
VO_DTEAM_DEFAULT_SE=$SE_HOST
VO_DTEAM_STORAGE_DIR=$CLASSIC_STORAGE_DIR/dteam
VO_DTEAM_VOMS_SERVERS='vomss://voms.hellasgrid.gr:8443/voms/dteam?/dteam/'
VO_DTEAM_VOMSES="'dteam voms.hellasgrid.gr 15004 /C=GR/O=HellasGrid/OU=hellasgrid.gr/CN=voms.hellasgrid.gr dteam 24' 'dteam voms2.hellasgrid.gr 15004 /C=GR/O=HellasGrid/OU=hellasgrid.gr/CN=voms2.hellasgrid.gr dteam 24'"
VO_DTEAM_VOMS_CA_DN="'/C=GR/O=HellasGrid/OU=Certification Authorities/CN=HellasGrid CA 2006' '/C=GR/O=HellasGrid/OU=Certification Authorities/CN=HellasGrid CA 2006'"

##########
# biomed #
##########
VO_BIOMED_SW_DIR=$VO_SW_DIR/biomed
VO_BIOMED_DEFAULT_SE=$SE_HOST
VO_BIOMED_STORAGE_DIR=$CLASSIC_STORAGE_DIR/biomed
VO_BIOMED_VOMS_SERVERS="vomss://cclcgvomsli01.in2p3.fr:8443/voms/biomed?/biomed/"
VO_BIOMED_VOMSES="'biomed cclcgvomsli01.in2p3.fr 15000 /O=GRID-FR/C=FR/O=CNRS/OU=CC-IN2P3/CN=cclcgvomsli01.in2p3.fr biomed 24'"
# INFN-GRID: Fix CA DN
VO_BIOMED_VOMS_CA_DN="'/C=FR/O=CNRS/CN=GRID2-FR'"

#######
# ops #
#######
VO_OPS_SW_DIR=$VO_SW_DIR/ops
VO_OPS_DEFAULT_SE=$SE_HOST
VO_OPS_STORAGE_DIR=$CLASSIC_STORAGE_DIR/ops
VO_OPS_VOMS_SERVERS="vomss://voms.cern.ch:8443/voms/ops?/ops/"
VO_OPS_VOMSES="'ops lcg-voms.cern.ch 15009 /DC=ch/DC=cern/OU=computers/CN=lcg-voms.cern.ch ops 24' 'ops voms.cern.ch 15009 /DC=ch/DC=cern/OU=computers/CN=voms.cern.ch ops 24'"
VO_OPS_VOMS_CA_DN="'/DC=ch/DC=cern/CN=CERN Trusted Certification Authority' '/DC=ch/DC=cern/CN=CERN Trusted Certification Authority'"

#======================
# INFN-GRID: Added VOs
#======================

########
# argo #
########
VO_ARGO_SW_DIR=$VO_SW_DIR/argo
VO_ARGO_DEFAULT_SE=$SE_HOST
VO_ARGO_STORAGE_DIR=$CLASSIC_STORAGE_DIR/argo
VO_ARGO_VOMS_SERVERS="'vomss://voms.cnaf.infn.it:8443/voms/argo?/argo' 'vomss://voms-01.pd.infn.it:8443/voms/argo?/argo'"
VO_ARGO_VOMSES="'argo voms.cnaf.infn.it 15012 /C=IT/O=INFN/OU=Host/L=CNAF/CN=voms.cnaf.infn.it argo' 'argo voms-01.pd.infn.it 15012 /C=IT/O=INFN/OU=Host/L=Padova/CN=voms-01.pd.infn.it argo'"
VO_ARGO_VOMS_CA_DN="'/C=IT/O=INFN/CN=INFN CA' '/C=IT/O=INFN/CN=INFN CA'"

#########
# babar #
#########
VO_BABAR_SW_DIR=$VO_SW_DIR/babar
VO_BABAR_DEFAULT_SE=$SE_HOST
VO_BABAR_STORAGE_DIR=$CLASSIC_STORAGE_DIR/babar
VO_BABAR_VOMS_SERVERS="'vomss://voms.gridpp.ac.uk:8443/voms/babar?/babar'"
VO_BABAR_VOMSES="'babar voms.gridpp.ac.uk 15002 /C=UK/O=eScience/OU=Manchester/L=HEP/CN=voms.gridpp.ac.uk babar'"
VO_BABAR_VOMS_CA_DN="'/C=UK/O=eScienceCA/OU=Authority/CN=UK e-Science CA 2B'"

#######
# bio #
#######
VO_BIO_SW_DIR=$VO_SW_DIR/bio
VO_BIO_DEFAULT_SE=$SE_HOST
VO_BIO_STORAGE_DIR=$CLASSIC_STORAGE_DIR/bio
VO_BIO_VOMS_SERVERS="'vomss://voms.cnaf.infn.it:8443/voms/bio?/bio' 'vomss://voms-01.pd.infn.it:8443/voms/bio?/bio'"
VO_BIO_VOMSES="'bio voms.cnaf.infn.it 15007 /C=IT/O=INFN/OU=Host/L=CNAF/CN=voms.cnaf.infn.it bio' 'bio voms-01.pd.infn.it 15007 /C=IT/O=INFN/OU=Host/L=Padova/CN=voms-01.pd.infn.it bio'"
VO_BIO_VOMS_CA_DN="'/C=IT/O=INFN/CN=INFN CA' '/C=IT/O=INFN/CN=INFN CA'"

#######
# cdf #
#######
VO_CDF_SW_DIR=$VO_SW_DIR/cdf
VO_CDF_DEFAULT_SE=$SE_HOST
VO_CDF_STORAGE_DIR=$CLASSIC_STORAGE_DIR/cdf
VO_CDF_VOMS_SERVERS="'vomss://voms.cnaf.infn.it:8443/voms/cdf?/cdf' 'vomss://voms-01.pd.infn.it:8443/voms/cdf?/cdf'"
VO_CDF_VOMSES="'cdf voms.cnaf.infn.it 15001 /C=IT/O=INFN/OU=Host/L=CNAF/CN=voms.cnaf.infn.it cdf' 'cdf voms-01.pd.infn.it 15001 /C=IT/O=INFN/OU=Host/L=Padova/CN=voms-01.pd.infn.it cdf' 'cdf voms.fnal.gov 15020 /DC=org/DC=doegrids/OU=Services/CN=http/voms.fnal.gov cdf'"
VO_CDF_VOMS_CA_DN="'/C=IT/O=INFN/CN=INFN CA' '/C=IT/O=INFN/CN=INFN CA' '/DC=org/DC=DOEGrids/OU=Certificate Authorities/CN=DOEGrids CA 1'"

#############
# compassit #
#############
VO_COMPASSIT_SW_DIR=$VO_SW_DIR/compassit
VO_COMPASSIT_DEFAULT_SE=$SE_HOST
VO_COMPASSIT_STORAGE_DIR=$CLASSIC_STORAGE_DIR/compassit
VO_COMPASSIT_VOMS_SERVERS="'vomss://voms2.cnaf.infn.it:8443/voms/compassit?/compassit' 'vomss://voms-02.pd.infn.it:8443/voms/compassit?/compassit'"
VO_COMPASSIT_VOMSES="'compassit voms2.cnaf.infn.it 15012 /C=IT/O=INFN/OU=Host/L=CNAF/CN=voms2.cnaf.infn.it compassit' 'compassit voms-02.pd.infn.it 15012 /C=IT/O=INFN/OU=Host/L=Padova/CN=voms-02.pd.infn.it compassit'"
VO_COMPASSIT_VOMS_CA_DN="'/C=IT/O=INFN/CN=INFN CA' '/C=IT/O=INFN/CN=INFN CA'"

############
# compchem #
############
VO_COMPCHEM_SW_DIR=$VO_SW_DIR/compchem
VO_COMPCHEM_DEFAULT_SE=$SE_HOST
VO_COMPCHEM_STORAGE_DIR=$CLASSIC_STORAGE_DIR/compchem
VO_COMPCHEM_VOMS_SERVERS="'vomss://voms.cnaf.infn.it:8443/voms/compchem?/compchem' 'vomss://voms-01.pd.infn.it:8443/voms/compchem?/compchem'"
VO_COMPCHEM_VOMSES="'compchem voms.cnaf.infn.it 15003 /C=IT/O=INFN/OU=Host/L=CNAF/CN=voms.cnaf.infn.it compchem' 'compchem voms-01.pd.infn.it 15003 /C=IT/O=INFN/OU=Host/L=Padova/CN=voms-01.pd.infn.it compchem'"
VO_COMPCHEM_VOMS_CA_DN="'/C=IT/O=INFN/CN=INFN CA' '/C=IT/O=INFN/CN=INFN CA'"

###########
# cyclops #
###########
VO_CYCLOPS_SW_DIR=$VO_SW_DIR/cyclops
VO_CYCLOPS_DEFAULT_SE=$SE_HOST
VO_CYCLOPS_STORAGE_DIR=$CLASSIC_STORAGE_DIR/cyclops
VO_CYCLOPS_VOMS_SERVERS="'vomss://voms2.cnaf.infn.it:8443/voms/cyclops?/cyclops' 'vomss://voms-02.pd.infn.it:8443/voms/cyclops?/cyclops'"
VO_CYCLOPS_VOMSES="'cyclops voms2.cnaf.infn.it 15011 /C=IT/O=INFN/OU=Host/L=CNAF/CN=voms2.cnaf.infn.it cyclops' 'cyclops voms-02.pd.infn.it 15011 /C=IT/O=INFN/OU=Host/L=Padova/CN=voms-02.pd.infn.it cyclops'"
VO_CYCLOPS_VOMS_CA_DN="'/C=IT/O=INFN/CN=INFN CA' '/C=IT/O=INFN/CN=INFN CA'"

########
# enea #
########
VO_ENEA_SW_DIR=$VO_SW_DIR/enea
VO_ENEA_DEFAULT_SE=$SE_HOST
VO_ENEA_STORAGE_DIR=$CLASSIC_STORAGE_DIR/enea
VO_ENEA_VOMS_SERVERS="'vomss://voms.cnaf.infn.it:8443/voms/enea?/enea' 'vomss://voms-01.pd.infn.it:8443/voms/enea?/enea'"
VO_ENEA_VOMSES="'enea voms.cnaf.infn.it 15005 /C=IT/O=INFN/OU=Host/L=CNAF/CN=voms.cnaf.infn.it enea' 'enea voms-01.pd.infn.it 15005 /C=IT/O=INFN/OU=Host/L=Padova/CN=voms-01.pd.infn.it enea'"
VO_ENEA_VOMS_CA_DN="'/C=IT/O=INFN/CN=INFN CA' '/C=IT/O=INFN/CN=INFN CA'"

#######
# esr #
#######
VO_ESR_SW_DIR=$VO_SW_DIR/esr
VO_ESR_DEFAULT_SE=$SE_HOST
VO_ESR_STORAGE_DIR=$CLASSIC_STORAGE_DIR/esr
VO_ESR_VOMS_SERVERS="'vomss://voms.grid.sara.nl:8443/voms/esr?/esr/'"
VO_ESR_VOMSES="'esr voms.grid.sara.nl 30001 /O=dutchgrid/O=hosts/OU=sara.nl/CN=voms.grid.sara.nl esr'"
VO_ESR_VOMS_CA_DN="'/C=NL/O=NIKHEF/CN=NIKHEF medium-security certification auth'"

##########
# euasia #
##########
VO_EUASIA_SW_DIR=$VO_SW_DIR/euasia
VO_EUASIA_DEFAULT_SE=$SE_HOST
#VO_EUASIA_STORAGE_DIR=$CLASSIC_STORAGE_DIR/euasia
VO_EUASIA_VOMS_SERVERS="'vomss://voms.grid.sinica.edu.tw:8443/voms/euasia?/euasia'"
VO_EUASIA_VOMSES="'euasia voms.grid.sinica.edu.tw 15015 /C=TW/O=AS/OU=GRID/CN=voms.grid.sinica.edu.tw euasia'"
VO_EUASIA_VOMS_CA_DN="'/C=TW/O=AS/CN=Academia Sinica Grid Computing Certification Authority Mercury'"

###########
# euchina #
###########
VO_EUCHINA_SW_DIR=$VO_SW_DIR/euchina
VO_EUCHINA_DEFAULT_SE=$SE_HOST
VO_EUCHINA_STORAGE_DIR=$CLASSIC_STORAGE_DIR/euchina
VO_EUCHINA_VOMS_SERVERS="'vomss://voms2.cnaf.infn.it:8443/voms/euchina?/euchina' 'vomss://voms-02.pd.infn.it:8443/voms/euchina?/euchina'"
VO_EUCHINA_VOMSES="'euchina voms2.cnaf.infn.it 15017 /C=IT/O=INFN/OU=Host/L=CNAF/CN=voms2.cnaf.infn.it euchina' 'euchina voms-02.pd.infn.it 15017 /C=IT/O=INFN/OU=Host/L=Padova/CN=voms-02.pd.infn.it euchina'"
VO_EUCHINA_VOMS_CA_DN="'/C=IT/O=INFN/CN=INFN CA' '/C=IT/O=INFN/CN=INFN CA'"

###########
# euindia #
###########
VO_EUINDIA_SW_DIR=$VO_SW_DIR/euindia
VO_EUINDIA_DEFAULT_SE=$SE_HOST
VO_EUINDIA_STORAGE_DIR=$CLASSIC_STORAGE_DIR/euindia
VO_EUINDIA_VOMS_SERVERS="'vomss://voms2.cnaf.infn.it:8443/voms/euindia?/euindia' 'vomss://voms-02.pd.infn.it:8443/voms/euindia?/euindia'"
VO_EUINDIA_VOMSES="'euindia voms2.cnaf.infn.it 15010 /C=IT/O=INFN/OU=Host/L=CNAF/CN=voms2.cnaf.infn.it euindia' 'euindia voms-02.pd.infn.it 15010 /C=IT/O=INFN/OU=Host/L=Padova/CN=voms-02.pd.infn.it euindia'"
VO_EUINDIA_VOMS_CA_DN="'/C=IT/O=INFN/CN=INFN CA' '/C=IT/O=INFN/CN=INFN CA'"

#########
# eumed #
#########
VO_EUMED_SW_DIR=$VO_SW_DIR/eumed
VO_EUMED_DEFAULT_SE=$SE_HOST
VO_EUMED_STORAGE_DIR=$CLASSIC_STORAGE_DIR/eumed
VO_EUMED_VOMS_SERVERS="'vomss://voms2.cnaf.infn.it:8443/voms/eumed?/eumed' 'vomss://voms-02.pd.infn.it:8443/voms/eumed?/eumed'"
VO_EUMED_VOMSES="'eumed voms2.cnaf.infn.it 15016 /C=IT/O=INFN/OU=Host/L=CNAF/CN=voms2.cnaf.infn.it eumed' 'eumed voms-02.pd.infn.it 15016 /C=IT/O=INFN/OU=Host/L=Padova/CN=voms-02.pd.infn.it eumed'"
VO_EUMED_VOMS_CA_DN="'/C=IT/O=INFN/CN=INFN CA' '/C=IT/O=INFN/CN=INFN CA'"

##########
# geant4 #
##########
VO_GEANT4_SW_DIR=$VO_SW_DIR/geant4
VO_GEANT4_DEFAULT_SE=$SE_HOST
VO_GEANT4_STORAGE_DIR=$CLASSIC_STORAGE_DIR/geant4
VO_GEANT4_VOMS_SERVERS="'vomss://voms.cern.ch:8443/voms/geant4?/geant4'"
VO_GEANT4_VOMSES="'geant4 voms.cern.ch 15007 /DC=ch/DC=cern/OU=computers/CN=voms.cern.ch geant4'"
VO_GEANT4_VOMS_CA_DN="'/DC=ch/DC=cern/CN=CERN Trusted Certification Authority'"

########
# gear #
########
VO_GEAR_SW_DIR=$VO_SW_DIR/gear
VO_GEAR_DEFAULT_SE=$SE_HOST
VO_GEAR_STORAGE_DIR=$CLASSIC_STORAGE_DIR/gear
VO_GEAR_VOMS_SERVERS="'vomss://voms.cern.ch:8443/voms/geant4?/geant4/gear'"
VO_GEAR_VOMS_CA_DN="'/DC=ch/DC=cern/CN=CERN Trusted Certification Authority'"

#########
# gilda #
#########
# GILDA VO: contact the Gilda Team for the Gilda settings

##########
# gridit #
##########
VO_GRIDIT_SW_DIR=$VO_SW_DIR/gridit
VO_GRIDIT_DEFAULT_SE=$SE_HOST
VO_GRIDIT_STORAGE_DIR=$CLASSIC_STORAGE_DIR/gridit
VO_GRIDIT_VOMS_SERVERS="'vomss://voms.cnaf.infn.it:8443/voms/gridit?/gridit' 'vomss://voms-01.pd.infn.it:8443/voms/gridit?/gridit'"
VO_GRIDIT_VOMSES="'gridit voms.cnaf.infn.it 15008 /C=IT/O=INFN/OU=Host/L=CNAF/CN=voms.cnaf.infn.it gridit' 'gridit voms-01.pd.infn.it 15008 /C=IT/O=INFN/OU=Host/L=Padova/CN=voms-01.pd.infn.it gridit'"
VO_GRIDIT_VOMS_CA_DN="'/C=IT/O=INFN/CN=INFN CA' '/C=IT/O=INFN/CN=INFN CA'"

########
# inaf #
########
VO_INAF_SW_DIR=$VO_SW_DIR/inaf
VO_INAF_DEFAULT_SE=$SE_HOST
VO_INAF_STORAGE_DIR=$CLASSIC_STORAGE_DIR/inaf
VO_INAF_VOMS_SERVERS="'vomss://voms.cnaf.infn.it:8443/voms/inaf?/inaf' 'vomss://voms-01.pd.infn.it:8443/voms/inaf?/inaf'"
VO_INAF_VOMSES="'inaf voms.cnaf.infn.it 15010 /C=IT/O=INFN/OU=Host/L=CNAF/CN=voms.cnaf.infn.it inaf' 'inaf voms-01.pd.infn.it 15010 /C=IT/O=INFN/OU=Host/L=Padova/CN=voms-01.pd.infn.it inaf'"
VO_INAF_VOMS_CA_DN="'/C=IT/O=INFN/CN=INFN CA' '/C=IT/O=INFN/CN=INFN CA'"

############
# infngrid #
############
VO_INFNGRID_SW_DIR=$VO_SW_DIR/infngrid
VO_INFNGRID_DEFAULT_SE=$SE_HOST
VO_INFNGRID_STORAGE_DIR=$CLASSIC_STORAGE_DIR/infngrid
VO_INFNGRID_VOMS_SERVERS="'vomss://voms.cnaf.infn.it:8443/voms/infngrid?/infngrid' 'vomss://voms-01.pd.infn.it:8443/voms/infngrid?/infngrid'"
VO_INFNGRID_VOMSES="'infngrid voms.cnaf.infn.it 15000 /C=IT/O=INFN/OU=Host/L=CNAF/CN=voms.cnaf.infn.it infngrid' 'infngrid voms-01.pd.infn.it 15000 /C=IT/O=INFN/OU=Host/L=Padova/CN=voms-01.pd.infn.it infngrid'"
VO_INFNGRID_VOMS_CA_DN="'/C=IT/O=INFN/CN=INFN CA' '/C=IT/O=INFN/CN=INFN CA'"

########
# libi #
########
VO_LIBI_SW_DIR=$VO_SW_DIR/libi
VO_LIBI_DEFAULT_SE=$SE_HOST
VO_LIBI_STORAGE_DIR=$CLASSIC_STORAGE_DIR/libi
VO_LIBI_VOMS_SERVERS="'vomss://voms.cnaf.infn.it:8443/voms/libi?/libi' 'vomss://voms-01.pd.infn.it:8443/voms/libi?/libi'"
VO_LIBI_VOMSES="'libi voms.cnaf.infn.it 15015 /C=IT/O=INFN/OU=Host/L=CNAF/CN=voms.cnaf.infn.it libi' 'libi voms-01.pd.infn.it 15015 /C=IT/O=INFN/OU=Host/L=Padova/CN=voms-01.pd.infn.it libi'"
VO_LIBI_VOMS_CA_DN="'/C=IT/O=INFN/CN=INFN CA' '/C=IT/O=INFN/CN=INFN CA'"

#########
# magic #
#########
VO_MAGIC_SW_DIR=$VO_SW_DIR/magic
VO_MAGIC_DEFAULT_SE=$SE_HOST
VO_MAGIC_STORAGE_DIR=$CLASSIC_STORAGE_DIR/magic
VO_MAGIC_VOMS_SERVERS="'vomss://voms01.pic.es:8443/voms/magic?/magic' 'vomss://voms02.pic.es:8443/voms/magic?/magic'"
VO_MAGIC_VOMSES="'magic voms01.pic.es 15003 /DC=es/DC=irisgrid/O=pic/CN=voms01.pic.es magic' 'magic voms02.pic.es 15003 /DC=es/DC=irisgrid/O=pic/CN=voms02.pic.es magic'"
VO_MAGIC_VOMS_CA_DN="'/DC=es/DC=irisgrid/CN=IRISGridCA' '/DC=es/DC=irisgrid/CN=IRISGridCA'"

########
# na48 #
########
VO_NA48_SW_DIR=$VO_SW_DIR/na48
VO_NA48_DEFAULT_SE=$SE_HOST
VO_NA48_STORAGE_DIR=$CLASSIC_STORAGE_DIR/na48
VO_NA48_VOMS_SERVERS="'vomss://lcg-voms.cern.ch:8443/voms/na48?/na48' 'vomss://voms.cern.ch:8443/voms/na48?/na48'"
VO_NA48_VOMSES="'na48 lcg-voms.cern.ch 15011 /DC=ch/DC=cern/OU=computers/CN=lcg-voms.cern.ch na48' 'na48 voms.cern.ch 15011 /DC=ch/DC=cern/OU=computers/CN=voms.cern.ch na48'"
VO_NA48_VOMS_CA_DN="'/DC=ch/DC=cern/CN=CERN Trusted Certification Authority' '/DC=ch/DC=cern/CN=CERN Trusted Certification Authority'"

##########
# pamela #
##########
VO_PAMELA_SW_DIR=$VO_SW_DIR/pamela
VO_PAMELA_DEFAULT_SE=$SE_HOST
VO_PAMELA_STORAGE_DIR=$CLASSIC_STORAGE_DIR/pamela
VO_PAMELA_VOMS_SERVERS="'vomss://voms.cnaf.infn.it:8443/voms/pamela?/pamela' 'vomss://voms-01.pd.infn.it:8443/voms/pamela?/pamela'"
VO_PAMELA_VOMSES="'pamela voms.cnaf.infn.it 15013 /C=IT/O=INFN/OU=Host/L=CNAF/CN=voms.cnaf.infn.it pamela' 'pamela voms-01.pd.infn.it 15013 /C=IT/O=INFN/OU=Host/L=Padova/CN=voms-01.pd.infn.it pamela'"
VO_PAMELA_VOMS_CA_DN="'/C=IT/O=INFN/CN=INFN CA' '/C=IT/O=INFN/CN=INFN CA'"

##########
# planck #
##########
VO_PLANCK_SW_DIR=$VO_SW_DIR/planck
VO_PLANCK_DEFAULT_SE=$SE_HOST
VO_PLANCK_STORAGE_DIR=$CLASSIC_STORAGE_DIR/planck
VO_PLANCK_VOMS_SERVERS="'vomss://voms.cnaf.infn.it:8443/voms/planck?/planck' 'vomss://voms-01.pd.infn.it:8443/voms/planck?/planck'"
VO_PLANCK_VOMSES="'planck voms.cnaf.infn.it 15002 /C=IT/O=INFN/OU=Host/L=CNAF/CN=voms.cnaf.infn.it planck' 'planck voms-01.pd.infn.it 15002 /C=IT/O=INFN/OU=Host/L=Padova/CN=voms-01.pd.infn.it planck'"
VO_PLANCK_VOMS_CA_DN="'/C=IT/O=INFN/CN=INFN CA' '/C=IT/O=INFN/CN=INFN CA'"

########
# sixt #
########
VO_SIXT_SW_DIR=$VO_SW_DIR/sixt
VO_SIXT_DEFAULT_SE=$SE_HOST
VO_SIXT_STORAGE_DIR=$CLASSIC_STORAGE_DIR/sixt
VO_SIXT_VOMS_SERVERS="'vomss://voms.cern.ch:8443/voms/sixt?/sixt'"
VO_SIXT_VOMSES="'sixt voms.cern.ch 15005 /DC=ch/DC=cern/OU=computers/CN=voms.cern.ch sixt'"
VO_SIXT_VOMS_CA_DN="'/DC=ch/DC=cern/CN=CERN Trusted Certification Authority'"

############
# theophys #
############
VO_THEOPHYS_SW_DIR=$VO_SW_DIR/theophys
VO_THEOPHYS_DEFAULT_SE=$SE_HOST
VO_THEOPHYS_STORAGE_DIR=$CLASSIC_STORAGE_DIR/theophys
VO_THEOPHYS_VOMS_SERVERS="'vomss://voms.cnaf.infn.it:8443/voms/theophys?/theophys' 'vomss://voms-01.pd.infn.it:8443/voms/theophys?/theophys'"
VO_THEOPHYS_VOMSES="'theophys voms.cnaf.infn.it 15006 /C=IT/O=INFN/OU=Host/L=CNAF/CN=voms.cnaf.infn.it theophys' 'theophys voms-01.pd.infn.it 15006 /C=IT/O=INFN/OU=Host/L=Padova/CN=voms-01.pd.infn.it theophys'"
VO_THEOPHYS_VOMS_CA_DN="'/C=IT/O=INFN/CN=INFN CA' '/C=IT/O=INFN/CN=INFN CA'"

#########
# virgo #
#########
VO_VIRGO_SW_DIR=$VO_SW_DIR/virgo
VO_VIRGO_DEFAULT_SE=$SE_HOST
VO_VIRGO_STORAGE_DIR=$CLASSIC_STORAGE_DIR/virgo
VO_VIRGO_VOMS_SERVERS="'vomss://voms.cnaf.infn.it:8443/voms/virgo?/virgo' 'vomss://voms-01.pd.infn.it:8443/voms/virgo?/virgo'"
VO_VIRGO_VOMSES="'virgo voms.cnaf.infn.it 15009 /C=IT/O=INFN/OU=Host/L=CNAF/CN=voms.cnaf.infn.it virgo' 'virgo voms-01.pd.infn.it 15009 /C=IT/O=INFN/OU=Host/L=Padova/CN=voms-01.pd.infn.it virgo'"
VO_VIRGO_VOMS_CA_DN="'/C=IT/O=INFN/CN=INFN CA' '/C=IT/O=INFN/CN=INFN CA'"

########
# zeus #
########
VO_ZEUS_SW_DIR=$VO_SW_DIR/zeus
VO_ZEUS_DEFAULT_SE=$SE_HOST
VO_ZEUS_STORAGE_DIR=$CLASSIC_STORAGE_DIR/zeus
VO_ZEUS_VOMS_SERVERS="'vomss://grid-voms.desy.de:8443/voms/zeus?/zeus'"
VO_ZEUS_VOMSES="'zeus grid-voms.desy.de 15112 /C=DE/O=GermanGrid/OU=DESY/CN=host/grid-voms.desy.de zeus'"
VO_ZEUS_VOMS_CA_DN="'/C=DE/O=GermanGrid/CN=GridKa-CA'"

-- AlessandroPaolini - 2012-02-10

 