standard_reco_dunefd.fcl
#include "services_dune.fcl"
#include "caldata_dune.fcl"
#include "cluster_dune.fcl"
#include "trackfindermodules.fcl"

process_name: Reco

services:
{
  # Load the service that manages ROOT files for histograms.
  TFileService: { fileName: "recofd_hist.root" }
  TimeTracker: {}
  RandomNumberGenerator: {} # ART native random number generator
  message: @local::standard_info
  @table::dunefd_services
}


# Source is now a ROOT file.
source:
{
  module_type: RootInput
  maxEvents: 1
}

# Define and configure some modules to do work on each event.
# First, modules are defined; they are scheduled later.
# Modules are grouped by type.
physics:
{

  producers:
  {
    calgaus:  @local::dunefd_calgaushf
    caldata:  @local::dunefd_calwire
    gaushit:  @local::dunefd_gaushitfinder
    hitcheat: @local::dunefd_hitcheater
    apacheat: @local::standard_disambigcheat
    apahit:   @local::dunefd_apahitfinder
    fuzzy:    @local::dunefd_fuzzycluster
  }

  analyzers:
  {
  }

  reco: [ caldata, gaushit, hitcheat, apahit ]
  ana: [ ]

  stream1: [ out1 ]

  trigger_paths: [ reco ]

  end_paths: [ ana, stream1 ]
}
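
# The calgaus, apacheat, and fuzzy producers are configured above but are not scheduled
# in the reco sequence. As an untested sketch (the module ordering below is illustrative,
# not taken from this file), they could be enabled by overriding the sequence, e.g.:
# physics.reco: [ caldata, calgaus, gaushit, hitcheat, apacheat, apahit, fuzzy ]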

# Block to define where the output goes. If a filter is defined in the physics block
# and put in trigger_paths, then the output stream that should receive those events
# needs a SelectEvents: { SelectEvents: [ XXX ] } entry, where XXX is the name of the
# trigger path containing the filter module(s).
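#
# A minimal sketch, assuming a hypothetical filter labelled "evtfilter" (neither the label
# nor its module_type comes from this file); these overrides would go after the outputs
# block below:
# physics.filters.evtfilter: { module_type: "SomeFilterModule" }  # hypothetical module
# physics.filt:              [ evtfilter ]
# physics.trigger_paths:     [ reco, filt ]
# outputs.out1.SelectEvents: { SelectEvents: [ filt ] }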
outputs:
{
  out1:
  {
    module_type: RootOutput
    fileName: "reco_dunefd.root" # default file name, can override from command line with -o or --output
#   outputCommands: [ "keep *", "drop *_caldata_*_*" ]
  }
}
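
# A hedged usage sketch of the command-line override mentioned above (file names are
# placeholders, not taken from this file):
# lar -c standard_reco_dunefd.fcl -s input_events.root -o reco_dunefd.root -T recofd_hist.root -n 1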