本文整理汇总了Python中pycbc.workflow.core.Node类的典型用法代码示例。如果您正苦于以下问题:Python Node类的具体用法?Python Node怎么用?Python Node使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了Node类的14个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。
示例1: create_node
def create_node(self, stat_files, tags=None):
    """Build a workflow Node that merges statmap files into one HDF5 output.

    Parameters
    ----------
    stat_files : FileList
        Statmap files supplied via ``--statmap-files``.
    tags : list of str, optional
        Tags used when naming the output file.

    Returns
    -------
    Node
        The configured workflow node.
    """
    tags = [] if tags is None else tags
    node = Node(self)
    node.add_input_list_opt('--statmap-files', stat_files)
    # The output covers the same segment as the first input file.
    node.new_output_file_opt(stat_files[0].segment, '.hdf',
                             '--output-file', tags=tags)
    return node
开发者ID:aravind-pazhayath,项目名称:pycbc,代码行数:7,代码来源:coincidence.py
示例2: create_node
def create_node(self, parent, inj_trigs, inj_string, max_inc, segment):
    """Build a node that filters injection triggers by maximum inclination.

    Parameters
    ----------
    parent : File
        Cache file whose storage path is passed as ``--inj-cache``.
    inj_trigs : list of File
        Injection trigger files; one filtered output is declared per input.
    inj_string : str
        Injection-set identifier passed as ``--inj-string``.
    max_inc : str
        Maximum inclination value passed as ``--max-inclination``.
    segment : segment
        Unused here; kept for interface compatibility with the caller.

    Returns
    -------
    (Node, FileList)
        The configured node and the expected filtered output files.
    """
    node = Node(self)
    # NOTE(review): trig_name is never used below; the cp.get call is kept
    # because it raises if the option is missing — confirm before removing.
    trig_name = self.cp.get("workflow", "trigger-name")
    node.add_opt("--inj-string", inj_string)
    node.add_opt("--max-inclination", max_inc)
    node.add_opt("--inj-cache", "%s" % parent.storage_path)

    out_files = FileList([])
    # Loop-invariant: the portion of inj_string before the inclination value.
    base_string = inj_string.split(max_inc)[0]
    for trig in inj_trigs:
        file_tags = [base_string, "FILTERED", max_inc,
                     trig.tag_str.rsplit("_", 1)[-1]]
        filtered = File(self.ifos, trig.description, trig.segment,
                        extension="xml", directory=self.out_dir,
                        tags=file_tags)
        filtered.PFN(filtered.cache_entry.path, site="local")
        out_files.append(filtered)

    node.add_opt("--output-dir", self.out_dir)
    return node, out_files
开发者ID:vitale82,项目名称:pycbc,代码行数:26,代码来源:legacy_ihope.py
示例3: create_node
def create_node(self, parent, inj_trigs, inj_string, max_inc, segment):
    """Build a node that filters injection triggers by maximum inclination.

    Parameters
    ----------
    parent : File
        Cache file whose storage path is passed as ``--inj-cache``.
    inj_trigs : list of File
        Injection trigger files; one filtered output is declared per input.
    inj_string : str
        Injection-set identifier passed as ``--inj-string``.
    max_inc : str
        Maximum inclination value passed as ``--max-inclination``.
    segment : segment
        Unused here; kept for interface compatibility with the caller.

    Returns
    -------
    (Node, FileList)
        The configured node and the expected filtered output files.
    """
    node = Node(self)
    # NOTE(review): trig_name is never used below; the cp.get call is kept
    # because it raises if the option is missing — confirm before removing.
    trig_name = self.cp.get('workflow', 'trigger-name')
    node.add_opt('--inj-string', inj_string)
    node.add_opt('--max-inclination', max_inc)
    node.add_opt('--inj-cache', '%s' % parent.storage_path)

    out_files = FileList([])
    for trig in inj_trigs:
        file_tags = [inj_string, "FILTERED", max_inc,
                     trig.tag_str.rsplit('_', 1)[-1]]
        filtered = File(self.ifos, trig.description, trig.segment,
                        extension="xml", directory=self.out_dir,
                        tags=file_tags)
        filtered.PFN(filtered.cache_entry.path, site="local")
        out_files.append(filtered)

    node.add_opt('--output-dir', self.out_dir)
    return node, out_files
开发者ID:shinsei90,项目名称:pycbc,代码行数:27,代码来源:legacy_ihope.py
示例4: create_node
def create_node(self, inj_coinc_file, inj_xml_file, veto_file, veto_name,
                tags=None):
    """Build a node that matches coincident triggers against an injection set.

    Parameters
    ----------
    inj_coinc_file : FileList
        Coincident trigger files passed as ``--trigger-file``.
    inj_xml_file : FileList
        Injection definition files passed as ``--injection-file``.
    veto_file : File
        Veto segment file; only used when ``veto_name`` is given.
    veto_name : str or None
        Segment name inside the veto file; when None no veto is applied.
    tags : list of str, optional
        Tags used when naming the output file.

    Returns
    -------
    Node
        The configured workflow node.
    """
    # Fix: the original used a mutable default (tags=[]), which is shared
    # between calls; default to None and create a fresh list instead.
    if tags is None:
        tags = []
    node = Node(self)
    node.add_input_list_opt('--trigger-file', inj_coinc_file)
    node.add_input_list_opt('--injection-file', inj_xml_file)
    if veto_name is not None:
        node.add_input_opt('--veto-file', veto_file)
        node.add_opt('--segment-name', veto_name)
    node.new_output_file_opt(inj_xml_file[0].segment, '.hdf',
                             '--output-file', tags=tags)
    return node
开发者ID:alex-nielsen,项目名称:pycbc,代码行数:10,代码来源:coincidence.py
示例5: create_node
def create_node(self, coinc_files, ifos, tags=None):
    """Build a node that combines coincident trigger files across a full span.

    Parameters
    ----------
    coinc_files : FileList
        Coincidence files passed as ``--coinc-files``.
    ifos : str
        Interferometer string passed as ``--ifos``.
    tags : list of str, optional
        Tags used when naming the output file.

    Returns
    -------
    Node
        The configured workflow node.
    """
    tags = [] if tags is None else tags
    # Span the output segment from the start of the earliest input file to
    # the end of the latest one.
    covered = coinc_files.get_times_covered_by_files()
    full_span = segments.segment(covered[0][0], covered[-1][1])
    node = Node(self)
    node.set_memory(5000)
    node.add_input_list_opt('--coinc-files', coinc_files)
    node.add_opt('--ifos', ifos)
    node.new_output_file_opt(full_span, '.hdf', '--output-file', tags=tags)
    return node
开发者ID:josh-willis,项目名称:pycbc,代码行数:12,代码来源:coincidence.py
示例6: create_node
def create_node(self, coinc_files, bank_file, background_bins, tags=None):
    """Build a node that splits coincident triggers into background bins.

    Parameters
    ----------
    coinc_files : FileList
        Coincidence files passed as ``--coinc-files``.
    bank_file : File
        Template bank passed as ``--bank-file``.
    background_bins : list of str
        Bin definitions of the form ``name:criterion``; one output file is
        declared per bin.
    tags : list of str, optional
        Tags used when naming the output files.

    Returns
    -------
    Node
        The configured node; ``node.names`` holds the bin names.
    """
    # Fix: the original used a mutable default (tags=[]), which is shared
    # between calls; default to None and create a fresh list instead.
    if tags is None:
        tags = []
    node = Node(self)
    node.add_input_list_opt('--coinc-files', coinc_files)
    node.add_input_opt('--bank-file', bank_file)
    node.add_opt('--background-bins', ' '.join(background_bins))
    # Bin names are the part before the ':' in each bin definition.
    names = [b.split(':')[0] for b in background_bins]
    output_files = [File(coinc_files[0].ifo_list,
                         self.name,
                         coinc_files[0].segment,
                         directory=self.out_dir,
                         tags=tags + ['mbin-%s' % i],
                         extension='.hdf')
                    for i in range(len(background_bins))]
    node.add_output_list_opt('--output-files', output_files)
    node.names = names
    return node
开发者ID:vaibhavtewari,项目名称:pycbc,代码行数:17,代码来源:coincidence.py
示例7: create_node
def create_node(self, parent, tags=None):
    """Build a clustering node for a single trigger file.

    Parameters
    ----------
    parent : File
        Trigger file whose storage path is passed as ``--trig-file``.
    tags : list of str, optional
        Unused here; kept for interface compatibility.

    Returns
    -------
    (Node, FileList)
        The configured node and a one-element list with the expected
        clustered output file.
    """
    # Fix: removed the unused `import Pegasus.DAX3 as dax` (it was only
    # referenced by commented-out code, which has also been deleted) so the
    # function no longer requires Pegasus to be importable.
    if tags is None:
        tags = []
    node = Node(self)
    # Set input / output options
    node.add_opt('--trig-file', '%s' % parent.storage_path)
    node.add_opt('--output-dir', self.out_dir)
    node.add_profile('condor', 'request_cpus', self.num_threads)
    # Declare the output as a pycbc.workflow.core.File object.
    out_file = File(self.ifos, 'INSPIRAL', parent.segment,
                    directory=self.out_dir, extension='xml.gz',
                    tags=[parent.tag_str, 'CLUSTERED'],
                    store_file=self.retain_files)
    out_file.PFN(out_file.cache_entry.path, site="local")
    out_file.node = node
    return node, FileList([out_file])
开发者ID:ligo-cbc,项目名称:pycbc-pylal,代码行数:29,代码来源:legacy_ihope.py
示例8: setup_plotthinca
def setup_plotthinca(workflow, input_files, cache_filename, coinc_cachepattern,
                     slide_cachepattern, output_dir, tags=None, **kwargs):
    """
    This function sets up the nodes that will generate summary from a list of
    thinca files.

    Parameters
    -----------
    workflow : ahope.Workflow
        The ahope workflow instance that the coincidence jobs will be added to.
    input_files : ahope.FileList
        An FileList of files that are used as input at this stage.
    cache_filename : str
        Filename of the ihope cache.
    coinc_cachepattern : str
        The pattern used to find zero-lag coincidence filenames in the cache.
    slide_cachepattern : str
        The pattern used to find time slide filenames in the cache.
    output_dir : path
        The directory in which output files will be stored.
    tags : list of strings (optional, default = None -> [])
        A list of the tagging strings that will be used for all jobs created
        by this call to the workflow. An example might be ['full_data'].
        This will be used in output names and directories.

    Returns
    --------
    plot_files : ahope.FileList
        A list of the output files from this stage.
        NOTE(review): this list is never populated below, so an empty
        FileList is always returned — confirm whether outputs should be
        collected here.
    """
    # Fix: the original used a mutable default (tags=[]), which is shared
    # between calls; default to None and create a fresh list instead.
    if tags is None:
        tags = []
    plot_files = FileList([])
    # create executable
    plotthinca_job = Executable(workflow.cp, 'plotthinca', 'vanilla',
                                workflow.ifos, output_dir, tags)
    # get all ifo combinations of at least 2 coincident ifos
    ifo_combos = []
    for n in xrange(len(plotthinca_job.ifo_list) + 1):
        # combinations() yields nothing when n+2 exceeds the list length,
        # so over-large sizes are harmless.
        for ifo_list in itertools.combinations(plotthinca_job.ifo_list, n + 2):
            ifo_combos.append(ifo_list)
    for tag in tags:
        for ifo_list in ifo_combos:
            ifo_string = ''.join(ifo_list)
            # create node
            node = Node(plotthinca_job)
            node.add_opt('--gps-start-time', workflow.analysis_time[0])
            node.add_opt('--gps-end-time', workflow.analysis_time[1])
            node.add_opt('--cache-file', cache_filename)
            node.add_opt('--ifo-times', ifo_string)
            node.add_opt('--ifo-tag', 'SECOND_' + ifo_string)
            for ifo in ifo_list:
                node.add_opt('--%s-triggers' % ifo.lower(), '')
            node.add_opt('--user-tag', tag.upper() + '_SUMMARY_PLOTS')
            node.add_opt('--output-path', output_dir)
            node.add_opt('--coinc-pattern', coinc_cachepattern)
            node.add_opt('--slide-pattern', slide_cachepattern)
            node.add_opt('--enable-output')
            # add node to workflow
            workflow.add_node(node)
    return plot_files
开发者ID:jakerobertandrews,项目名称:pycbc,代码行数:71,代码来源:summaryplots.py
示例9: setup_hardware_injection_page
def setup_hardware_injection_page(workflow, input_files, cache_filename,
                                  inspiral_cachepattern, output_dir,
                                  tags=None, **kwargs):
    """
    This function sets up the nodes that will create the hardware injection
    page.

    Parameters
    -----------
    workflow : ahope.Workflow
        The ahope workflow instance that the coincidence jobs will be added to.
    input_files : ahope.FileList
        An FileList of files that are used as input at this stage.
    cache_filename : str
        Filename of the ihope cache.
    inspiral_cachepattern : str
        The pattern that will be used to find inspiral filenames in the cache.
    output_dir : path
        The directory in which output files will be stored.
    tags : list of strings (optional, default = None -> [])
        A list of the tagging strings that will be used for all jobs created
        by this call to the workflow. An example might be ['full_data'].
        This will be used to search the cache.

    Returns
    --------
    plot_files : ahope.FileList
        A list of the output files from this stage.
    """
    # Fix: the original used a mutable default (tags=[]), which is shared
    # between calls; default to None and create a fresh list instead.
    if tags is None:
        tags = []
    logging.info("Entering hardware injection page setup.")
    out_files = FileList([])
    # check if hardware injection section exists
    # if not then do not add the hardware injection job to the workflow
    if not workflow.cp.has_section('workflow-hardware-injections'):
        msg = "There is no workflow-hardware-injections section. "
        msg += "The hardware injection page will not be added to the workflow."
        logging.info(msg)
        logging.info("Leaving hardware injection page setup.")
        return out_files
    # make the output dir
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    # create executable
    hwinjpage_job = Executable(workflow.cp, 'hardware_injection_page',
                               'vanilla', workflow.ifos, output_dir, tags)
    # retrieve hardware injection file
    hwinjDefUrl = workflow.cp.get_opt_tags('workflow-hardware-injections',
                                           'hwinj-definer-url', tags)
    hwinjDefBaseName = os.path.basename(hwinjDefUrl)
    hwinjDefNewPath = os.path.join(output_dir, hwinjDefBaseName)
    urllib.urlretrieve(hwinjDefUrl, hwinjDefNewPath)
    # update hwinj definer file location so downstream jobs use local copy
    workflow.cp.set("workflow-hardware-injections", "hwinj-definer-file",
                    hwinjDefNewPath)
    # query for the hardware injection segments
    get_hardware_injection_segment_files(workflow, output_dir, hwinjDefNewPath)
    # create node
    node = Node(hwinjpage_job)
    node.add_opt('--gps-start-time', workflow.analysis_time[0])
    node.add_opt('--gps-end-time', workflow.analysis_time[1])
    node.add_opt('--source-xml', hwinjDefNewPath)
    node.add_opt('--segment-dir', output_dir)
    node.add_opt('--cache-file', cache_filename)
    node.add_opt('--cache-pattern', inspiral_cachepattern)
    node.add_opt('--analyze-injections', '')
    for ifo in workflow.ifos:
        node.add_opt('--%s-injections' % ifo.lower(), '')
    outfile = File(node.executable.ifo_string, 'HWINJ_SUMMARY',
                   workflow.analysis_time, extension='html',
                   directory=output_dir)
    node.add_opt('--outfile', outfile.storage_path)
    # add node to workflow
    workflow.add_node(node)
    out_files += node.output_files
    logging.info("Leaving hardware injection page setup.")
    return out_files
开发者ID:jakerobertandrews,项目名称:pycbc,代码行数:91,代码来源:summaryplots.py
示例10: get_veto_segs
def get_veto_segs(workflow, ifo, category, start_time, end_time, out_dir,
                  vetoGenJob, tag=None, execute_now=False):
    """
    Obtain veto segments for the selected ifo and veto category and add the
    job to generate this to the workflow.

    Parameters
    -----------
    workflow: pycbc.workflow.core.Workflow
        An instance of the Workflow class that manages the workflow.
    ifo : string
        The string describing the ifo to generate vetoes for.
    category : int
        The veto category to generate vetoes for.
    start_time : gps time (either int/LIGOTimeGPS)
        The time at which to begin searching for segments.
    end_time : gps time (either int/LIGOTimeGPS)
        The time at which to stop searching for segments.
    out_dir : path
        The directory in which output will be stored.
    vetoGenJob : Job
        The veto generation Job class that will be used to create the Node.
    tag : string, optional (default=None)
        Use this to specify a tag. This can be used if this module is being
        called more than once to give call specific configuration (by setting
        options in [workflow-datafind-${TAG}] rather than
        [workflow-datafind]). This is also used to tag the Files returned by
        the class to uniqueify the Files and uniqueify the actual filename.
        FIXME: Filenames may not be unique with current codes!
    execute_now : boolean, optional
        If true, jobs are executed immediately. If false, they are added to
        the workflow to be run later.

    Returns
    --------
    veto_def_file : pycbc.workflow.core.OutSegFile
        The workflow File object corresponding to this DQ veto file.
    """
    valid_seg = segments.segment([start_time, end_time])
    node = Node(vetoGenJob)
    node.add_opt('--veto-categories', str(category))
    node.add_opt('--ifo-list', ifo)
    node.add_opt('--gps-start-time', str(start_time))
    node.add_opt('--gps-end-time', str(end_time))
    # Output filename follows the IFO-TAG-START-DURATION convention.
    xml_name = "%s-VETOTIME_CAT%d-%d-%d.xml" % (ifo, category, start_time,
                                                end_time - start_time)
    xml_path = os.path.abspath(os.path.join(out_dir, xml_name))
    file_url = urlparse.urlunparse(['file', 'localhost', xml_path,
                                    None, None, None])
    file_tags = ['VETO_CAT%d' % (category)]
    if tag:
        file_tags.insert(0, tag)
    veto_xml_file = OutSegFile(ifo, 'SEGMENTS', valid_seg, file_url,
                               tags=file_tags)
    node._add_output(veto_xml_file)
    if not execute_now:
        workflow.add_node(node)
    elif file_needs_generating(veto_xml_file.cache_entry.path):
        workflow.execute_node(node)
    else:
        # Output already exists: mark the node done and register the file's
        # physical location instead of re-running the job.
        node.executed = True
        for fil in node._outputs:
            fil.node = None
            fil.PFN(fil.storage_path, site='local')
    return veto_xml_file
开发者ID:jsread,项目名称:pycbc,代码行数:69,代码来源:segment.py
示例11: setup_plotnumtemplates
def setup_plotnumtemplates(workflow, input_files, cache_filename,
                           tmpltbank_cachepattern, output_dir, tags=None,
                           **kwargs):
    """
    This function sets up the nodes that will generate a plot of the number
    of templates against time.

    Parameters
    -----------
    workflow : ahope.Workflow
        The ahope workflow instance that the coincidence jobs will be added to.
    input_files : ahope.FileList
        An FileList of files that are used as input at this stage.
    cache_filename : str
        Filename of the ihope cache.
    tmpltbank_cachepattern : str
        The pattern used to find template_bank filenames in the cache.
    output_dir : path
        The directory in which output files will be stored.
    tags : list of strings (optional, default = None -> [])
        A list of the tagging strings that will be used for all jobs created
        by this call to the workflow. An example might be ['full_data'].
        This will be used in output names and directories.

    Returns
    --------
    plot_files : ahope.FileList
        A list of the output files from this stage.
        NOTE(review): this list is never populated below, so an empty
        FileList is always returned — confirm whether outputs should be
        collected here.
    """
    # Fix: the original used a mutable default (tags=[]), which is shared
    # between calls; default to None and create a fresh list instead.
    if tags is None:
        tags = []
    plot_files = FileList([])
    # create executable
    plotnumtemplates_job = Executable(workflow.cp, 'plotnumtemplates',
                                      'vanilla', workflow.ifos, output_dir,
                                      tags)
    for tag in tags:
        # create node
        node = Node(plotnumtemplates_job)
        node.add_opt('--gps-start-time', workflow.analysis_time[0])
        node.add_opt('--gps-end-time', workflow.analysis_time[1])
        node.add_opt('--cache-file', cache_filename)
        node.add_opt('--ifo-times', node.executable.ifo_string)
        node.add_opt('--user-tag', tag.upper() + '_SUMMARY_PLOTS')
        node.add_opt('--output-path', output_dir)
        node.add_opt('--bank-pattern', tmpltbank_cachepattern)
        node.add_opt('--enable-output')
        # add node to workflow
        workflow.add_node(node)
    return plot_files
开发者ID:jakerobertandrews,项目名称:pycbc,代码行数:57,代码来源:summaryplots.py
示例12: create_node
def create_node(self, trig_files=None, segment_dir=None, analysis_seg=None,
                out_tags=None, tags=None):
    """Build a trig_combiner node that merges per-segment trigger files.

    Parameters
    ----------
    trig_files : FileList
        Input trigger files; required, a ValueError is raised if absent.
    segment_dir : str
        Directory passed as ``--segment-dir``.
    analysis_seg : segment
        Analysis segment; its start is used as ``--trig-start-time``.
    out_tags : list of str, optional
        One output file is declared per tag, plus one per off-source trial.
    tags : list of str, optional
        Unused here; kept for interface compatibility.

    Returns
    -------
    (Node, FileList)
        The configured node and the expected output files.
    """
    # Fix: the original used mutable defaults (out_tags=[], tags=[]), which
    # are shared between calls; default to None and create fresh lists.
    if out_tags is None:
        out_tags = []
    if tags is None:
        tags = []
    # Fail fast before constructing the node.
    if not trig_files:
        raise ValueError("%s must be supplied with trigger files"
                         % self.name)
    node = Node(self)
    # Data options
    num_trials = int(self.cp.get("trig_combiner", "num-trials"))
    trig_name = self.cp.get('workflow', 'trigger-name')
    if all("COHERENT_NO_INJECTIONS" in t.name for t in trig_files) and \
            self.cp.has_option_tag('inspiral', 'do-short-slides',
                                   'coherent_no_injections'):
        node.add_opt('--short-slides')
    node.add_opt('--grb-name', trig_name)
    node.add_opt('--trig-start-time', analysis_seg[0])
    node.add_opt('--ifo-tag', self.ifos)
    node.add_opt('--user-tag', 'INSPIRAL')
    # Set input / output options
    node.add_input_list_opt('--input-files', trig_files)
    node.add_opt('--segment-dir', segment_dir)
    node.add_opt('--output-dir', self.out_dir)
    out_files = FileList([])
    for out_tag in out_tags:
        out_file = File(self.ifos, 'INSPIRAL', trig_files[0].segment,
                        directory=self.out_dir, extension='xml.gz',
                        tags=["GRB%s" % trig_name, out_tag],
                        store_file=self.retain_files)
        out_files.append(out_file)
    # One additional output per off-source trial.
    for trial in range(1, num_trials + 1):
        out_file = File(self.ifos, 'INSPIRAL', trig_files[0].segment,
                        directory=self.out_dir, extension='xml.gz',
                        tags=["GRB%s" % trig_name, "OFFTRIAL_%d" % trial],
                        store_file=self.retain_files)
        out_files.append(out_file)
    node.add_profile('condor', 'request_cpus', self.num_threads)
    return node, out_files
开发者ID:gayathrigcc,项目名称:pycbc,代码行数:46,代码来源:legacy_ihope.py
示例13: create_node
def create_node(self, raw_fit_file, bank_file):
    """Build a node that post-processes raw template fits against a bank.

    Parameters
    ----------
    raw_fit_file : File
        Raw fit file passed as ``--template-fit-file``; its segment is
        reused for the output file.
    bank_file : File
        Template bank passed as ``--template-file``.

    Returns
    -------
    Node
        The configured workflow node.
    """
    fit_node = Node(self)
    fit_node.add_input_opt('--template-fit-file', raw_fit_file)
    fit_node.add_input_opt('--template-file', bank_file)
    fit_node.new_output_file_opt(raw_fit_file.segment, '.hdf', '--output')
    return fit_node
开发者ID:sfairhur,项目名称:pycbc,代码行数:6,代码来源:coincidence.py
示例14: create_node
def create_node(self, parent=None, offsource_file=None, seg_dir=None,
                found_file=None, missed_file=None, tags=None):
    """Build an efficiency-computation node for on/off-source triggers.

    Parameters
    ----------
    parent : File
        On-source trigger file; required, a ValueError is raised if absent.
    offsource_file : File
        Off-source trigger file passed as ``--offsource-file``.
    seg_dir : str
        Directory used for both ``--veto-directory`` and ``--segment-dir``.
    found_file, missed_file : File, optional
        Found/missed injection files; must be supplied together or not at
        all, otherwise a ValueError is raised.
    tags : list of str, optional
        Used to build the output path. NOTE(review): tags[0] (and tags[1]
        in the injection case) are indexed below, so callers must supply
        enough tags — an empty list raises IndexError, as before.

    Returns
    -------
    Node
        The configured workflow node.
    """
    # Fix: the original used a mutable default (tags=[]), which is shared
    # between calls; default to None and create a fresh list instead.
    if tags is None:
        tags = []
    node = Node(self)
    if not parent:
        raise ValueError("%s must be supplied with trigger files"
                         % self.name)
    # Set input / output options
    node.add_opt('--onsource-file', '%s' % parent.storage_path)
    node.add_opt('--offsource-file', '%s' % offsource_file.storage_path)
    node.add_opt('--veto-directory', seg_dir)
    node.add_opt('--segment-dir', seg_dir)
    if found_file and missed_file:
        node.add_opt('--found-file', '%s' % found_file.storage_path)
        node.add_opt('--missed-file', '%s' % missed_file.storage_path)
        out_dir = "%s/output/%s/efficiency_%s" % (self.out_dir, tags[1],
                                                  tags[0])
    elif found_file or missed_file:
        present = found_file if found_file else missed_file
        raise ValueError("Must either be supplied with no injection files "
                         "or both missed and found injection files. "
                         "Received only %s" % present.name)
    else:
        out_dir = "%s/output/%s/efficiency" % (self.out_dir, tags[0])
    node.add_opt('--output-path', out_dir)
    node.add_profile('condor', 'request_cpus', self.num_threads)
    return node
开发者ID:vaibhavtewari,项目名称:pycbc,代码行数:37,代码来源:legacy_ihope.py
注:本文中的pycbc.workflow.core.Node类示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。 |
请发表评论