_id stringlengths 2 7 | title stringlengths 1 88 | partition stringclasses 3
values | text stringlengths 31 13.1k | language stringclasses 1
value | meta_information dict |
|---|---|---|---|---|---|
q253300 | HeronExecutor._start_processes | validation | def _start_processes(self, commands):
"""Start all commands and add them to the dict of processes to be monitored """
Log.info("Start processes")
processes_to_monitor = {}
# First start all the processes
for (name, command) in commands.items():
p = self._run_process(name, | python | {
"resource": ""
} |
q253301 | HeronExecutor.start_process_monitor | validation | def start_process_monitor(self):
""" Monitor all processes in processes_to_monitor dict,
restarting any if they fail, up to max_runs times.
"""
# Now wait for any child to die
Log.info("Start process monitor")
while True:
if len(self.processes_to_monitor) > 0:
(pid, status) = os.wa... | python | {
"resource": ""
} |
q253302 | HeronExecutor.get_commands_to_run | validation | def get_commands_to_run(self):
"""
Prepare either TMaster or Streaming commands according to shard.
The Shell command is attached to all containers. The empty container plan and non-exist
container plan are bypassed.
"""
# During shutdown the watch might get triggered with the empty packing plan... | python | {
"resource": ""
} |
q253303 | HeronExecutor.launch | validation | def launch(self):
''' Determines the commands to be run and compares them with the existing running commands.
Then starts new ones required and kills old ones no longer required.
'''
with self.process_lock:
current_commands = dict(map((lambda process: (process.name, process.command)),
... | python | {
"resource": ""
} |
q253304 | HeronExecutor.start_state_manager_watches | validation | def start_state_manager_watches(self):
"""
Receive updates to the packing plan from the statemgrs and update processes as needed.
"""
Log.info("Start state manager watches")
statemgr_config = StateMgrConfig()
statemgr_config.set_state_locations(configloader.load_state_manager_locations(
... | python | {
"resource": ""
} |
q253305 | Runner.run | validation | def run(self, name, config, builder):
"""Builds the topology and submits it"""
if not isinstance(name, str):
raise RuntimeError("Name has to be a string type")
if not isinstance(config, Config):
| python | {
"resource": ""
} |
q253306 | _modules_to_main | validation | def _modules_to_main(modList):
"""Force every module in modList to be placed into main"""
if not modList:
return
main = sys.modules['__main__']
for modname in modList:
if isinstance(modname, str):
try:
mod = __import__(modname)
except Exception:
sys.stderr.write(
... | python | {
"resource": ""
} |
q253307 | _load_class | validation | def _load_class(cls, d):
"""
Loads additional properties into class `cls`.
"""
for k, v in d.items():
if isinstance(k, tuple):
typ, k = k
if typ == 'property':
v = property(*v)
elif typ == 'staticmethod':
v = staticmethod(v) # | python | {
"resource": ""
} |
q253308 | CloudPickler.save_module | validation | def save_module(self, obj):
"""
Save a module as an import
"""
| python | {
"resource": ""
} |
q253309 | CloudPickler.save_file | validation | def save_file(self, obj): # pylint: disable=too-many-branches
"""Save a file"""
try:
import StringIO as pystringIO #we can't use cStringIO as it lacks the name attribute
except ImportError:
import io as pystringIO # pylint: disable=reimported
if not hasattr(obj, 'name') or not hasattr(obj,... | python | {
"resource": ""
} |
q253310 | tail | validation | def tail(filename, n):
"""Returns last n lines from the filename. No exception handling"""
size = os.path.getsize(filename)
with open(filename, "rb") as f:
fm = mmap.mmap(f.fileno(), 0, mmap.MAP_SHARED, mmap.PROT_READ)
try:
for i in xrange(size - 1, -1, -1):
if fm[i] == '\n':
| python | {
"resource": ""
} |
q253311 | SerializerHelper.get_serializer | validation | def get_serializer(context):
"""Returns a serializer for a given context"""
cluster_config = context.get_cluster_config()
serializer_clsname = cluster_config.get(constants.TOPOLOGY_SERIALIZER_CLASSNAME, None)
if serializer_clsname is None:
return PythonSerializer()
else:
try:
top... | python | {
"resource": ""
} |
q253312 | EventLooper.register_timer_task_in_sec | validation | def register_timer_task_in_sec(self, task, second):
"""Registers a new timer task
:param task: function to be run at a specified second from now
:param second: how many seconds to wait before the timer is triggered
"""
| python | {
"resource": ""
} |
q253313 | EventLooper._get_next_timeout_interval | validation | def _get_next_timeout_interval(self):
"""Get the next timeout from now
This should be used from do_wait().
:returns (float) next_timeout, or 10.0 if there are no timer events
"""
if | python | {
"resource": ""
} |
q253314 | EventLooper._trigger_timers | validation | def _trigger_timers(self):
"""Triggers expired timers"""
current = time.time()
while len(self.timer_tasks) > 0 and (self.timer_tasks[0][0] - | python | {
"resource": ""
} |
q253315 | Query.find_closing_braces | validation | def find_closing_braces(self, query):
"""Find the index of the closing braces for the opening braces
at the start of the query string. Note that first character
of input string must be an opening braces."""
if query[0] != '(':
raise Exception("Trying to find closing braces for no opening braces")
... | python | {
"resource": ""
} |
q253316 | Query.get_sub_parts | validation | def get_sub_parts(self, query):
"""The subparts are separated by a comma. Make sure
that commas inside the part themselves are not considered."""
parts = []
num_open_braces = 0
delimiter = ','
last_starting_index = 0
for i in range(len(query)):
if query[i] == '(':
num_open_brac... | python | {
"resource": ""
} |
q253317 | Query.parse_query_string | validation | def parse_query_string(self, query):
"""Returns a parse tree for the query, each of the node is a
subclass of Operator. This is both a lexical as well as syntax analyzer step."""
if not query:
return None
# Just braces do not matter
if query[0] == '(':
index = self.find_closing_braces(qu... | python | {
"resource": ""
} |
q253318 | BoltInstance.process_incoming_tuples | validation | def process_incoming_tuples(self):
"""Should be called when tuple was buffered into in_stream
This method is equivalent to ``addBoltTasks()`` but
is designed for event-driven single-thread bolt.
"""
# back-pressure
if | python | {
"resource": ""
} |
q253319 | BoltInstance.ack | validation | def ack(self, tup):
"""Indicate that processing of a Tuple has succeeded
It is compatible with StreamParse API.
"""
if not isinstance(tup, HeronTuple):
Log.error("Only HeronTuple type is supported in ack()")
return
if self.acking_enabled:
ack_tuple = tuple_pb2.AckTuple()
ac... | python | {
"resource": ""
} |
q253320 | BoltInstance.fail | validation | def fail(self, tup):
"""Indicate that processing of a Tuple has failed
It is compatible with StreamParse API.
"""
if not isinstance(tup, HeronTuple):
Log.error("Only HeronTuple type is supported in fail()")
return
if self.acking_enabled:
fail_tuple = tuple_pb2.AckTuple()
fa... | python | {
"resource": ""
} |
q253321 | template_slave_hcl | validation | def template_slave_hcl(cl_args, masters):
'''
Template slave config file
'''
slave_config_template = "%s/standalone/templates/slave.template.hcl" % cl_args["config_path"]
slave_config_actual = "%s/standalone/resources/slave.hcl" % cl_args["config_path"]
masters_in_quotes = ['"%s"' | python | {
"resource": ""
} |
q253322 | template_scheduler_yaml | validation | def template_scheduler_yaml(cl_args, masters):
'''
Template scheduler.yaml
'''
single_master = masters[0]
scheduler_config_actual = "%s/standalone/scheduler.yaml" % cl_args["config_path"]
scheduler_config_template = "%s/standalone/templates/scheduler.template.yaml" \
| python | {
"resource": ""
} |
q253323 | template_uploader_yaml | validation | def template_uploader_yaml(cl_args, masters):
'''
Template uploader.yaml
'''
single_master = masters[0]
uploader_config_template = "%s/standalone/templates/uploader.template.yaml" \
% cl_args["config_path"]
uploader_config_actual = "%s/standalone/uploader.yaml" % cl_args["config_... | python | {
"resource": ""
} |
q253324 | template_apiserver_hcl | validation | def template_apiserver_hcl(cl_args, masters, zookeepers):
"""
template apiserver.hcl
"""
single_master = masters[0]
apiserver_config_template = "%s/standalone/templates/apiserver.template.hcl" \
% cl_args["config_path"]
apiserver_config_actual = "%s/standalone/resources/apiserv... | python | {
"resource": ""
} |
q253325 | template_statemgr_yaml | validation | def template_statemgr_yaml(cl_args, zookeepers):
'''
Template statemgr.yaml
'''
statemgr_config_file_template = "%s/standalone/templates/statemgr.template.yaml" \
% cl_args["config_path"]
statemgr_config_file_actual = "%s/standalone/statemgr.yaml" % cl_args["config_path"]
| python | {
"resource": ""
} |
q253326 | template_heron_tools_hcl | validation | def template_heron_tools_hcl(cl_args, masters, zookeepers):
'''
template heron tools
'''
heron_tools_hcl_template = "%s/standalone/templates/heron_tools.template.hcl" \
% cl_args["config_path"]
heron_tools_hcl_actual = "%s/standalone/resources/heron_tools.hcl" \
... | python | {
"resource": ""
} |
q253327 | print_cluster_info | validation | def print_cluster_info(cl_args):
'''
get cluster info for standalone cluster
'''
parsed_roles = read_and_parse_roles(cl_args)
masters = list(parsed_roles[Role.MASTERS])
slaves = list(parsed_roles[Role.SLAVES])
zookeepers = list(parsed_roles[Role.ZOOKEEPERS])
cluster = list(parsed_roles[Role.CLUSTER])
... | python | {
"resource": ""
} |
q253328 | add_additional_args | validation | def add_additional_args(parsers):
'''
add additional parameters to parser
'''
for parser in parsers:
cli_args.add_verbose(parser)
cli_args.add_config(parser)
parser.add_argument(
| python | {
"resource": ""
} |
q253329 | stop_cluster | validation | def stop_cluster(cl_args):
'''
teardown the cluster
'''
Log.info("Terminating cluster...")
roles = read_and_parse_roles(cl_args)
masters = roles[Role.MASTERS]
slaves = roles[Role.SLAVES]
dist_nodes = masters.union(slaves)
# stop all jobs
if masters:
try:
single_master = list(masters)[0]
... | python | {
"resource": ""
} |
q253330 | start_cluster | validation | def start_cluster(cl_args):
'''
Start a Heron standalone cluster
'''
roles = read_and_parse_roles(cl_args)
masters = roles[Role.MASTERS]
slaves = roles[Role.SLAVES]
zookeepers = roles[Role.ZOOKEEPERS]
Log.info("Roles:")
Log.info(" - Master Servers: %s" % list(masters))
Log.info(" - Slave Servers: %s... | python | {
"resource": ""
} |
q253331 | start_heron_tools | validation | def start_heron_tools(masters, cl_args):
'''
Start Heron tracker and UI
'''
single_master = list(masters)[0]
wait_for_master_to_start(single_master)
cmd = "%s run %s >> /tmp/heron_tools_start.log 2>&1 &" \
% (get_nomad_path(cl_args), get_heron_tools_job_file(cl_args))
Log.info("Starting Heron Too... | python | {
"resource": ""
} |
q253332 | distribute_package | validation | def distribute_package(roles, cl_args):
'''
distribute Heron packages to all nodes
'''
Log.info("Distributing heron package to nodes (this might take a while)...")
masters = roles[Role.MASTERS]
slaves = roles[Role.SLAVES]
tar_file = tempfile.NamedTemporaryFile(suffix=".tmp").name
Log.debug("TAR file %s... | python | {
"resource": ""
} |
q253333 | wait_for_master_to_start | validation | def wait_for_master_to_start(single_master):
'''
Wait for a nomad master to start
'''
i = 0
while True:
try:
r = requests.get("http://%s:4646/v1/status/leader" % single_master)
if r.status_code == 200:
break
except:
Log.debug(sys.exc_info()[0])
| python | {
"resource": ""
} |
q253334 | wait_for_job_to_start | validation | def wait_for_job_to_start(single_master, job):
'''
Wait for a Nomad job to start
'''
i = 0
while True:
try:
r = requests.get("http://%s:4646/v1/job/%s" % (single_master, job))
if r.status_code == 200 and r.json()["Status"] == "running":
break
else:
raise RuntimeError()
... | python | {
"resource": ""
} |
q253335 | scp_package | validation | def scp_package(package_file, destinations, cl_args):
'''
scp and extract package
'''
pids = []
for dest in destinations:
if is_self(dest):
continue
Log.info("Server: %s" % dest)
file_path = "/tmp/heron.tar.gz"
dest_file_path = "%s:%s" % (dest, file_path)
remote_cmd = "rm -rf ~/.her... | python | {
"resource": ""
} |
q253336 | make_tarfile | validation | def make_tarfile(output_filename, source_dir):
'''
Tar a directory
'''
with tarfile.open(output_filename, "w:gz") as tar:
| python | {
"resource": ""
} |
q253337 | start_master_nodes | validation | def start_master_nodes(masters, cl_args):
'''
Start master nodes
'''
pids = []
for master in masters:
Log.info("Starting master on %s" % master)
cmd = "%s agent -config %s >> /tmp/nomad_server_log 2>&1 &" \
% (get_nomad_path(cl_args), get_nomad_master_config_file(cl_args))
if not is_self... | python | {
"resource": ""
} |
q253338 | start_slave_nodes | validation | def start_slave_nodes(slaves, cl_args):
'''
Start slave nodes
'''
pids = []
for slave in slaves:
Log.info("Starting slave on %s" % slave)
cmd = "%s agent -config %s >> /tmp/nomad_client.log 2>&1 &" \
% (get_nomad_path(cl_args), get_nomad_slave_config_file(cl_args))
if not is_self(slave):
... | python | {
"resource": ""
} |
q253339 | read_and_parse_roles | validation | def read_and_parse_roles(cl_args):
'''
read config files to get roles
'''
roles = dict()
with open(get_inventory_file(cl_args), 'r') as stream:
try:
roles = yaml.load(stream)
except yaml.YAMLError as exc:
Log.error("Error parsing inventory file: %s" % exc)
sys.exit(-1)
if Role.ZO... | python | {
"resource": ""
} |
q253340 | get_remote_home | validation | def get_remote_home(host, cl_args):
'''
get home directory of remote host
'''
cmd = "echo ~"
if not is_self(host):
cmd = ssh_remote_execute(cmd, host, cl_args)
pid = subprocess.Popen(cmd,
| python | {
"resource": ""
} |
q253341 | get_hostname | validation | def get_hostname(ip_addr, cl_args):
'''
get host name of remote host
'''
if is_self(ip_addr):
return get_self_hostname()
cmd = "hostname"
ssh_cmd = ssh_remote_execute(cmd, ip_addr, cl_args)
pid = subprocess.Popen(ssh_cmd,
shell=True,
stdout=subprocess.... | python | {
"resource": ""
} |
q253342 | is_self | validation | def is_self(addr):
'''
check if this host is this addr
'''
ips = []
for i in netifaces.interfaces():
entry = netifaces.ifaddresses(i)
if netifaces.AF_INET in entry:
for ipv4 in entry[netifaces.AF_INET]: | python | {
"resource": ""
} |
q253343 | BaseInstance.log | validation | def log(self, message, level=None):
"""Log message, optionally providing a logging level
It is compatible with StreamParse API.
:type message: str
:param message: the log message to send
:type level: str
:param level: the logging level,
one of: trace (=debug), debug, info, wa... | python | {
"resource": ""
} |
q253344 | dereference_symlinks | validation | def dereference_symlinks(src):
"""
Resolve all symbolic references that `src` points to. Note that this
is different than `os.path.realpath` as path components leading up to
the final location may still be symbolic links. | python | {
"resource": ""
} |
q253345 | to_table | validation | def to_table(result):
''' normalize raw result to table '''
max_count = 20
table, count = [], 0
for role, envs_topos in result.items():
for env, topos in envs_topos.items():
for topo in topos:
count += 1
if count > max_count:
| python | {
"resource": ""
} |
q253346 | Result.add_context | validation | def add_context(self, err_context, succ_context=None):
""" Prepend msg to add some context information
:param pmsg: context info
:return: None
| python | {
"resource": ""
} |
q253347 | StateManager.is_host_port_reachable | validation | def is_host_port_reachable(self):
"""
Returns true if the host is reachable. In some cases, it may not be reachable a tunnel
must be used.
"""
for hostport in self.hostportlist:
try:
socket.create_connection(hostport, StateManager.TIMEOUT_SECONDS)
return True
except:
| python | {
"resource": ""
} |
q253348 | StateManager.pick_unused_port | validation | def pick_unused_port(self):
""" Pick an unused port. There is a slight chance that this wont work. """
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) | python | {
"resource": ""
} |
q253349 | StateManager.establish_ssh_tunnel | validation | def establish_ssh_tunnel(self):
"""
Establish an ssh tunnel for each local host and port
that can be used to communicate with the state host.
"""
localportlist = []
for (host, port) | python | {
"resource": ""
} |
q253350 | FileStateManager.monitor | validation | def monitor(self):
"""
Monitor the rootpath and call the callback
corresponding to the change.
This monitoring happens periodically. This function
is called in a separate thread from the main thread,
because it sleeps for the intervals between each poll.
"""
def trigger_watches_based_on... | python | {
"resource": ""
} |
q253351 | FileStateManager.get_pplan | validation | def get_pplan(self, topologyName, callback=None):
"""
Get physical plan of a topology
"""
if callback:
self.pplan_watchers[topologyName].append(callback)
else:
pplan_path = self.get_pplan_path(topologyName)
| python | {
"resource": ""
} |
q253352 | FileStateManager.get_execution_state | validation | def get_execution_state(self, topologyName, callback=None):
"""
Get execution state
"""
if callback:
self.execution_state_watchers[topologyName].append(callback)
else:
execution_state_path = self.get_execution_state_path(topologyName)
| python | {
"resource": ""
} |
q253353 | FileStateManager.get_scheduler_location | validation | def get_scheduler_location(self, topologyName, callback=None):
"""
Get scheduler location
"""
if callback:
self.scheduler_location_watchers[topologyName].append(callback)
| python | {
"resource": ""
} |
q253354 | create_socket_options | validation | def create_socket_options():
"""Creates SocketOptions object from a given sys_config dict"""
sys_config = system_config.get_sys_config()
opt_list = [const.INSTANCE_NETWORK_WRITE_BATCH_SIZE_BYTES,
const.INSTANCE_NETWORK_WRITE_BATCH_TIME_MS,
const.INSTANCE_NETWORK_READ_BATCH_SIZE_BYTES,
... | python | {
"resource": ""
} |
q253355 | TopologyType.init_topology | validation | def init_topology(mcs, classname, class_dict):
"""Initializes a topology protobuf"""
if classname == 'Topology':
# Base class can't initialize protobuf
return
heron_options = TopologyType.get_heron_options_from_env()
initial_state = heron_options.get("cmdline.topology.initial.state", "RUNNIN... | python | {
"resource": ""
} |
q253356 | TopologyType.get_heron_options_from_env | validation | def get_heron_options_from_env():
"""Retrieves heron options from the `HERON_OPTIONS` environment variable.
Heron options have the following format:
cmdline.topologydefn.tmpdirectory=/var/folders/tmpdir
cmdline.topology.initial.state=PAUSED
In this case, the returned map will contain:
... | python | {
"resource": ""
} |
q253357 | TopologyBuilder.add_spec | validation | def add_spec(self, *specs):
"""Add specs to the topology
:type specs: HeronComponentSpec
:param specs: specs to add to the topology
"""
for spec in specs:
if not isinstance(spec, HeronComponentSpec):
raise TypeError("Argument to add_spec needs to be HeronComponentSpec, given: %s"
| python | {
"resource": ""
} |
q253358 | TopologyBuilder.add_spout | validation | def add_spout(self, name, spout_cls, par, config=None, optional_outputs=None):
"""Add a spout to the topology"""
spout_spec = | python | {
"resource": ""
} |
q253359 | TopologyBuilder.add_bolt | validation | def add_bolt(self, name, bolt_cls, par, inputs, config=None, optional_outputs=None):
"""Add a bolt to the topology"""
bolt_spec = bolt_cls.spec(name=name, par=par, inputs=inputs, config=config,
| python | {
"resource": ""
} |
q253360 | TopologyBuilder.set_config | validation | def set_config(self, config):
"""Set topology-wide configuration to the topology
:type config: dict
:param config: topology-wide config
"""
if not isinstance(config, dict):
| python | {
"resource": ""
} |
q253361 | TopologyBuilder.build_and_submit | validation | def build_and_submit(self):
"""Builds the topology and submits to the destination"""
class_dict | python | {
"resource": ""
} |
q253362 | queries_map | validation | def queries_map():
"""map from query parameter to query name""" | python | {
"resource": ""
} |
q253363 | get_clusters | validation | def get_clusters():
"""Synced API call to get all cluster names"""
instance = tornado.ioloop.IOLoop.instance()
# pylint: disable=unnecessary-lambda
try:
| python | {
"resource": ""
} |
q253364 | get_logical_plan | validation | def get_logical_plan(cluster, env, topology, role):
"""Synced API call to get logical plans"""
instance = tornado.ioloop.IOLoop.instance()
try:
return instance.run_sync(lambda: | python | {
"resource": ""
} |
q253365 | get_topology_info | validation | def get_topology_info(*args):
"""Synced API call to get topology information"""
instance = tornado.ioloop.IOLoop.instance()
try:
return instance.run_sync(lambda: | python | {
"resource": ""
} |
q253366 | get_component_metrics | validation | def get_component_metrics(component, cluster, env, topology, role):
"""Synced API call to get component metrics"""
all_queries = metric_queries()
try:
result = get_topology_metrics(cluster, env, topology, component, [],
| python | {
"resource": ""
} |
q253367 | configure | validation | def configure(level=logging.INFO, logfile=None):
""" Configure logger which dumps log on terminal
:param level: logging level: info, warning, verbose...
:type level: logging level
:param logfile: log file name, default to None
:type logfile: string
:return: None
:rtype: None
"""
# Remove all the exi... | python | {
"resource": ""
} |
q253368 | init_rotating_logger | validation | def init_rotating_logger(level, logfile, max_files, max_bytes):
"""Initializes a rotating logger
It also makes sure that any StreamHandler is removed, so as to avoid stdout/stderr
constipation issues
"""
logging.basicConfig()
root_logger = logging.getLogger()
log_format = "[%(asctime)s] [%(levelname)s] ... | python | {
"resource": ""
} |
q253369 | set_logging_level | validation | def set_logging_level(cl_args):
"""simply set verbose level based on command-line args
:param cl_args: CLI arguments
:type cl_args: dict
:return: None
:rtype: None
"""
| python | {
"resource": ""
} |
q253370 | HeronComponentSpec._get_spout | validation | def _get_spout(self):
"""Returns Spout protobuf message"""
spout = topology_pb2.Spout()
spout.comp.CopyFrom(self._get_base_component())
| python | {
"resource": ""
} |
q253371 | HeronComponentSpec._get_bolt | validation | def _get_bolt(self):
"""Returns Bolt protobuf message"""
bolt = topology_pb2.Bolt()
bolt.comp.CopyFrom(self._get_base_component())
# Add streams
| python | {
"resource": ""
} |
q253372 | HeronComponentSpec._get_base_component | validation | def _get_base_component(self):
"""Returns Component protobuf message"""
comp = topology_pb2.Component()
comp.name = self.name
comp.spec = topology_pb2.ComponentObjectSpec.Value("PYTHON_CLASS_NAME")
| python | {
"resource": ""
} |
q253373 | HeronComponentSpec._get_comp_config | validation | def _get_comp_config(self):
"""Returns component-specific Config protobuf message
It first adds ``topology.component.parallelism``, and is overridden by
a user-defined component-specific configuration, specified by spec().
"""
proto_config = topology_pb2.Config()
# first add parallelism
key... | python | {
"resource": ""
} |
q253374 | HeronComponentSpec._add_in_streams | validation | def _add_in_streams(self, bolt):
"""Adds inputs to a given protobuf Bolt message"""
if self.inputs is None:
return
# sanitize inputs and get a map <GlobalStreamId -> Grouping>
input_dict = self._sanitize_inputs()
for global_streamid, gtype in input_dict.items():
in_stream = bolt.inputs.... | python | {
"resource": ""
} |
q253375 | HeronComponentSpec._add_out_streams | validation | def _add_out_streams(self, spbl):
"""Adds outputs to a given protobuf Bolt or Spout message"""
if self.outputs is None:
return
# sanitize outputs and get a map <stream_id -> out fields>
output_map = self._sanitize_outputs()
for stream_id, out_fields in output_map.items(): | python | {
"resource": ""
} |
q253376 | HeronComponentSpec.get_out_streamids | validation | def get_out_streamids(self):
"""Returns a set of output stream ids registered for this component"""
if self.outputs is None:
return set()
if not isinstance(self.outputs, (list, tuple)):
raise TypeError("Argument to outputs must be either list or tuple, given: | python | {
"resource": ""
} |
q253377 | HeronComponentSpec._get_stream_id | validation | def _get_stream_id(comp_name, stream_id):
"""Returns a StreamId protobuf message"""
proto_stream_id = topology_pb2.StreamId()
proto_stream_id.id = stream_id | python | {
"resource": ""
} |
q253378 | HeronComponentSpec._get_stream_schema | validation | def _get_stream_schema(fields):
"""Returns a StreamSchema protobuf message"""
stream_schema = topology_pb2.StreamSchema()
for field in fields:
key = stream_schema.keys.add()
| python | {
"resource": ""
} |
q253379 | GlobalStreamId.component_id | validation | def component_id(self):
"""Returns component_id of this GlobalStreamId
Note that if HeronComponentSpec is specified as componentId and its name is not yet
available (i.e. when ``name`` argument was not given in ``spec()`` method in Bolt or Spout),
this property returns a message with uuid. However, thi... | python | {
"resource": ""
} |
q253380 | TopologyContextImpl.register_metric | validation | def register_metric(self, name, metric, time_bucket_in_sec):
"""Registers a new metric to this context"""
collector = | python | {
"resource": ""
} |
q253381 | TopologyContextImpl.get_sources | validation | def get_sources(self, component_id):
"""Returns the declared inputs to specified component
:return: map <streamId namedtuple (same structure as protobuf msg) -> gtype>, or
None if not found
"""
# this is necessary because protobuf message is not hashable
StreamId = namedtuple('StreamId... | python | {
"resource": ""
} |
q253382 | TopologyContextImpl.get_component_tasks | validation | def get_component_tasks(self, component_id):
"""Returns the task ids allocated for the given component id""" | python | {
"resource": ""
} |
q253383 | TopologyContextImpl.add_task_hook | validation | def add_task_hook(self, task_hook):
"""Registers a specified task hook to this context
:type task_hook: heron.instance.src.python.utils.topology.ITaskHook
:param task_hook: Implementation of ITaskHook
"""
if not isinstance(task_hook, ITaskHook):
| python | {
"resource": ""
} |
q253384 | TopologyContextImpl.get_metrics_collector | validation | def get_metrics_collector(self):
"""Returns this context's metrics collector"""
if self.metrics_collector is None or not isinstance(self.metrics_collector, MetricsCollector):
| python | {
"resource": ""
} |
q253385 | TopologyContextImpl.invoke_hook_spout_ack | validation | def invoke_hook_spout_ack(self, message_id, complete_latency_ns):
"""invoke task hooks for every time spout acks a tuple
:type message_id: str
:param message_id: message id to which an acked tuple was anchored
:type complete_latency_ns: float
:param complete_latency_ns: complete latency in nano sec... | python | {
"resource": ""
} |
q253386 | TopologyContextImpl.invoke_hook_spout_fail | validation | def invoke_hook_spout_fail(self, message_id, fail_latency_ns):
"""invoke task hooks for every time spout fails a tuple
:type message_id: str
:param message_id: message id to which a failed tuple was anchored
:type fail_latency_ns: float
:param fail_latency_ns: fail latency in nano | python | {
"resource": ""
} |
q253387 | TopologyContextImpl.invoke_hook_bolt_execute | validation | def invoke_hook_bolt_execute(self, heron_tuple, execute_latency_ns):
"""invoke task hooks for every time bolt processes a tuple
:type heron_tuple: HeronTuple
:param heron_tuple: tuple that is executed
:type execute_latency_ns: float
:param execute_latency_ns: execute latency in nano seconds
"""... | python | {
"resource": ""
} |
q253388 | TopologyContextImpl.invoke_hook_bolt_ack | validation | def invoke_hook_bolt_ack(self, heron_tuple, process_latency_ns):
"""invoke task hooks for every time bolt acks a tuple
:type heron_tuple: HeronTuple
:param heron_tuple: tuple that is acked
:type process_latency_ns: float
:param process_latency_ns: process latency in nano seconds
"""
if len(... | python | {
"resource": ""
} |
q253389 | TopologyContextImpl.invoke_hook_bolt_fail | validation | def invoke_hook_bolt_fail(self, heron_tuple, fail_latency_ns):
"""invoke task hooks for every time bolt fails a tuple
:type heron_tuple: HeronTuple
:param heron_tuple: tuple that is failed
:type fail_latency_ns: float
:param fail_latency_ns: fail latency in nano | python | {
"resource": ""
} |
q253390 | submit_fatjar | validation | def submit_fatjar(cl_args, unknown_args, tmp_dir):
'''
We use the packer to make a package for the jar and dump it
to a well-known location. We then run the main method of class
with the specified arguments. We pass arguments as an environment variable HERON_OPTIONS.
This will run the jar file with the topol... | python | {
"resource": ""
} |
q253391 | submit_tar | validation | def submit_tar(cl_args, unknown_args, tmp_dir):
'''
Extract and execute the java files inside the tar and then add topology
definition file created by running submitTopology
We use the packer to make a package for the tar and dump it
to a well-known location. We then run the main method of class
with the s... | python | {
"resource": ""
} |
q253392 | TextFileGenerator.setup | validation | def setup(self, context):
"""Implements TextFile Generator's setup method"""
myindex = context.get_partition_index()
self._files_to_consume = self._files[myindex::context.get_num_partitions()]
self.logger.info("TextFileSpout files to | python | {
"resource": ""
} |
q253393 | add_verbose | validation | def add_verbose(parser):
""" add optional verbose argument"""
parser.add_argument(
| python | {
"resource": ""
} |
q253394 | add_tracker_url | validation | def add_tracker_url(parser):
""" add optional tracker_url argument """
parser.add_argument(
'--tracker_url',
metavar='(tracker | python | {
"resource": ""
} |
q253395 | hex_escape | validation | def hex_escape(bin_str):
"""
Hex encode a binary string
"""
printable = string.ascii_letters + string.digits + string.punctuation + ' '
| python | {
"resource": ""
} |
q253396 | make_shell_endpoint | validation | def make_shell_endpoint(topologyInfo, instance_id):
"""
Makes the http endpoint for the heron shell
if shell port is present, otherwise returns None.
"""
# Format: container_<id>_<instance_id>
pplan = topologyInfo["physical_plan"]
stmgrId = | python | {
"resource": ""
} |
q253397 | make_shell_logfiles_url | validation | def make_shell_logfiles_url(host, shell_port, _, instance_id=None):
"""
Make the url for log-files in heron-shell
from the info stored in stmgr.
If no instance_id is provided, the link will
be to the dir for the whole container.
If shell port is not present, it returns None.
"""
if not shell_port:
r... | python | {
"resource": ""
} |
q253398 | make_shell_logfile_data_url | validation | def make_shell_logfile_data_url(host, shell_port, instance_id, offset, length):
"""
Make the url for log-file data in heron-shell | python | {
"resource": ""
} |
q253399 | OutgoingPacket.create_packet | validation | def create_packet(reqid, message):
"""Creates Outgoing Packet from a given reqid and message
:param reqid: REQID object
:param message: protocol buffer object
"""
assert message.IsInitialized()
packet = ''
# calculate the total size of the packet incl. header
typename = message.DESCRIP... | python | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.