3 from __future__
import absolute_import
4 from __future__
import print_function
8 import sys, os, subprocess, json, stream
10 import larbatch_utilities
11 from larbatch_utilities
import convert_str
12 import project_utilities
# --- Module-level environment setup (fragmentary in this extraction) ---

# Restore the real command line; `myargv` is presumably a copy of sys.argv
# saved before importing ROOT (which consumes argv) — line not visible here,
# TODO confirm.
sys.argv = myargv[0:1]

# Drop TERM so ROOT does not emit terminal escape sequences into output
# streams (presumably; confirm against upstream).
if 'TERM' in os.environ:
    del os.environ['TERM']

# Suppress ROOT informational/warning messages; only real errors reported.
ROOT.gErrorIgnoreLevel = ROOT.kError

# PyROOT emits RuntimeWarnings of the form "creating converter..." — ignore.
warnings.filterwarnings('ignore', category = RuntimeWarning, message = 'creating converter.*')
def convert_1_adler32_to_0_adler32(crc, filesize):
    """Convert an adler32 checksum seeded with 1 (zlib default) to the
    equivalent checksum seeded with 0 (enstore convention).

    The adler32 state is two 16-bit sums modulo 65521: s1 (seed plus the
    running byte sum) and s2 (running sum of s1 after each byte).  Seeding
    with 1 instead of 0 adds exactly 1 to s1 and one copy of the seed per
    byte (i.e. filesize) to s2, so the seed-0 value is recovered by
    subtracting those two contributions modulo 65521.

    NOTE(review): the original fragment was missing its ``def`` line,
    ``crc = int(crc)`` and ``s1 = (crc & 0xffff)``; they are restored here
    as required for the surviving lines to run.

    Args:
        crc:      adler32 value computed with initial seed 1 (int or str).
        filesize: number of bytes that were checksummed (int or str).

    Returns:
        int: the adler32 value as it would be with initial seed 0.
    """
    crc = int(crc)
    filesize = int(filesize)
    size = int(filesize % 65521)
    s1 = (crc & 0xffff)
    s2 = ((crc >> 16) & 0xffff)
    s1 = (s1 + 65521 - 1) % 65521      # remove the +1 seed from the low sum
    s2 = (s2 + 65521 - size) % 65521   # remove filesize copies of the seed
    return (s2 << 16) + s1
def enstoreChecksum(fileobj):
    """Compute an enstore-style adler32 checksum of an open binary file.

    Reads ``fileobj`` to EOF in 1 MiB chunks, accumulating a zlib adler32
    checksum seeded with 0, and returns it in the dict form used for SAM
    file metadata.

    NOTE(review): the original fragment lost its ``def`` line, the read
    loop scaffolding and the exception-handler body in extraction; they
    are restored here (the handler now simply re-raises so failures are
    not silently swallowed — confirm against upstream, which presumably
    wrapped the error in a project exception type).

    Args:
        fileobj: file-like object open for binary reading.

    Returns:
        dict: {"crc_value": "<decimal crc>", "crc_type": "adler 32 crc type"}

    Raises:
        OSError, IOError: re-raised if reading the file fails.
    """
    crc = 0
    readblocksize = 1024*1024
    while True:
        try:
            s = fileobj.read(readblocksize)
        except (OSError, IOError) as ex:
            # Reading failed part-way through; propagate the error.
            raise
        if not s:
            break
        crc = zlib.adler32(s, crc)
    if crc < 0:
        # Force a 32-bit unsigned representation.  Only reachable on
        # Python 2, where zlib.adler32 could return a negative int;
        # harmless on Python 3.
        crc = (crc & 0x7FFFFFFF) | 0x80000000
    return { "crc_value" : str(crc), "crc_type" : "adler 32 crc type" }
64 """Calculate enstore compatible CRC value""" 67 srm_url = project_utilities.path_to_srm_url(path)
71 f = larbatch_posix.open(path,
'rb')
73 except (IOError, OSError)
as ex:
90 project_utilities.test_proxy()
91 cmd = [
'srmls',
'-2',
'-l', srm_url]
95 for line
in srmout.split(
'\n'):
97 size =
int(line[2:line.find(
'/')-1])
100 if line.find(
"Checksum value:") > 0:
101 ssum = line[line.find(
':') + 2:]
102 crc1 =
int( ssum , base = 16 )
106 crc = {
"crc_value":
str(crc0),
"crc_type":
"adler 32 crc type"}
110 cmd = [
'ifdh',
'cp', path,
'/dev/fd/1']
111 p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
# NOTE(review): fragment of the metadata-extraction routine — the enclosing
# `def` (presumably taking `inputfile`) and the initialization of `md` were
# lost in extraction.  Gaps are flagged below; code tokens are untouched.

if not os.path.exists(inputfile):
    # NOTE(review): missing-file handling body lost in extraction.

# Basic file-level metadata (SAM stores sizes as strings).
md['file_name'] = os.path.basename(inputfile)
md['file_size'] = str(os.path.getsize(inputfile))

if not inputfile.endswith('.root'):
    # NOTE(review): non-ROOT-file branch body lost in extraction.

# Suppress the RooFit startup banner before opening the file.
ROOT.gEnv.SetValue('RooFit.Banner', '0')
file = ROOT.TFile.Open(larbatch_posix.root_stream(inputfile))
if file and file.IsOpen() and not file.IsZombie():

    # Event count from the art "Events" tree, if present.
    obj = file.Get('Events')
    if obj and obj.InheritsFrom('TTree'):
        nev = obj.GetEntriesFast()
        md['events'] = str(nev)

    # Collect distinct (run, subrun) pairs from the "SubRuns" tree.
    subrun_tree = file.Get('SubRuns')
    if subrun_tree and subrun_tree.InheritsFrom('TTree'):
        nsubruns = subrun_tree.GetEntriesFast()
        # TTreeFormula extracts run/subrun numbers from SubRunAuxiliary.
        # NOTE(review): the third constructor argument (presumably
        # subrun_tree) was lost in extraction on both calls below.
        tfr = ROOT.TTreeFormula('subruns',
                                'SubRunAuxiliary.id_.run_.run_',
        tfs = ROOT.TTreeFormula('subruns',
                                'SubRunAuxiliary.id_.subRun_',
        for entry in range(nsubruns):
            subrun_tree.GetEntry(entry)
            run = tfr.EvalInstance64()
            subrun = tfs.EvalInstance64()
            run_subrun = (run, subrun)
            # Deduplicate; md['subruns'] is presumably initialized to []
            # on a line lost in extraction — TODO confirm.
            if not run_subrun in md['subruns']:
                md['subruns'].append(run_subrun)

# Derive the data stream name from the file name via the stream helper.
stream_name = stream.get_stream(inputfile)
md['data_stream'] = stream_name
if __name__ == "__main__":

    # Command-line interface: one positional ROOT file, optional JSON output
    # destination (stdout when --output is not given).
    Parser = argparse.ArgumentParser \
      (description="Extracts metadata for a ROOT file.")
    Parser.add_argument("InputFile", help="ROOT file to extract metadata about")
    Parser.add_argument("--output", "-o", dest="OutputFile", default=None,
      help="JSON file to write the output to [default: screen]"
    # NOTE(review): the closing parenthesis of the add_argument call above
    # was lost in extraction.
    args = Parser.parse_args()

    # NOTE(review): the statement computing `md` from args.InputFile was
    # lost in extraction.

    # Serialize deterministically (sorted keys, 2-space indent) so repeated
    # runs produce byte-identical JSON.
    mdtext = json.dumps(md, indent=2, sort_keys=True)

    # Write to the requested file, or stdout when no --output was given.
    outputFile = open(args.OutputFile, 'w') if args.OutputFile else sys.stdout
    print(mdtext, file=outputFile)
# --- Extraction residue (unrelated to this script; preserved verbatim) ---
# int open(const char *, int)
# Opens a file descriptor.
# nvidia::inferenceserver::client::Error Error