[CRAB]
jobtype = cmssw
scheduler = glite
### NOTE: just set the name of the server (pi, lnl, etc.)
### and crab will submit the jobs to that server...
#server_name = cnaf

[CMSSW]
### The data you want to access (to be found on DBS)
#datasetpath=/ttbar_inclusive_TopRex/CMSSW_1_3_1-Spring07-1122/GEN-SIM-DIGI-RECO
datasetpath=none
### The ParameterSet you want to use
pset=ttbar-local.py
### Splitting parameters
#total_number_of_events=-1
#total_number_of_events=10
events_per_job = 1
number_of_jobs = 5
### The output files (comma-separated list)
output_file = ttbar.root, ttbar.random

[USER]
### OUTPUT files management
## output returned to the UI
return_data = 1
## output copied to a SE
copy_data = 0
storage_element = srm.cern.ch
storage_path = /castor/cern.ch/user/u/user
## if using the server
#thresholdLevel = 100
#eMail = your@Email.address

[EDG]
## RB/WMS management:
rb = CNAF
## Black and White Lists management:
## By StorageElement
#se_black_list =
#se_white_list =
## By ComputingElement
#ce_black_list =
#ce_white_list =

[CONDORG]
# Set this to condor to override the batch system defined in gridcat.
#batchsystem = condor
# Specify additional condor_g requirements.
# Use this requirement to run on CMS-dedicated hardware:
# globus_rsl = (condor_submit=(requirements 'ClusterName == \"CMS\" && (Arch == \"INTEL\" || Arch == \"X86_64\")'))
# Use this requirement to run on the new hardware:
#globus_rsl = (condor_submit=(requirements 'regexp(\"cms-*\",Machine)'))
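
With a configuration like the one above in place, a typical CRAB session is driven from the command line. The snippet below is a minimal sketch, assuming the configuration has been saved as crab.cfg (any file name passed via -cfg works) and that the CMSSW and grid environments have already been set up in the shell:

# create the jobs defined by the splitting parameters in [CMSSW]
crab -create -cfg crab.cfg
# submit them to the scheduler chosen in the [CRAB] section
crab -submit
# monitor progress and, once the jobs are done, retrieve the output files
crab -status
crab -getoutput

Because return_data = 1 and copy_data = 0 in the [USER] section, the output files listed in output_file are brought back to the UI by crab -getoutput rather than copied to the storage element.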