id int32 0 252k | repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1
value | code stringlengths 51 19.8k | code_tokens list | docstring stringlengths 3 17.3k | docstring_tokens list | sha stringlengths 40 40 | url stringlengths 87 242 |
|---|---|---|---|---|---|---|---|---|---|---|---|
23,600 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Job.py | Jobs.run | def run(self, postfunc=lambda: None):
"""Run the jobs.
postfunc() will be invoked after the jobs has run. It will be
invoked even if the jobs are interrupted by a keyboard
interrupt (well, in fact by a signal such as either SIGINT,
SIGTERM or SIGHUP). The execution of postfunc() is protected
against keyboard interrupts and is guaranteed to run to
completion."""
self._setup_sig_handler()
try:
self.job.start()
finally:
postfunc()
self._reset_sig_handler() | python | def run(self, postfunc=lambda: None):
self._setup_sig_handler()
try:
self.job.start()
finally:
postfunc()
self._reset_sig_handler() | [
"def",
"run",
"(",
"self",
",",
"postfunc",
"=",
"lambda",
":",
"None",
")",
":",
"self",
".",
"_setup_sig_handler",
"(",
")",
"try",
":",
"self",
".",
"job",
".",
"start",
"(",
")",
"finally",
":",
"postfunc",
"(",
")",
"self",
".",
"_reset_sig_hand... | Run the jobs.
postfunc() will be invoked after the jobs has run. It will be
invoked even if the jobs are interrupted by a keyboard
interrupt (well, in fact by a signal such as either SIGINT,
SIGTERM or SIGHUP). The execution of postfunc() is protected
against keyboard interrupts and is guaranteed to run to
completion. | [
"Run",
"the",
"jobs",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Job.py#L100-L114 |
23,601 | iotile/coretools | transport_plugins/native_ble/iotile_transport_native_ble/connection_manager.py | ConnectionAction.expired | def expired(self):
"""Boolean property if this action has expired
"""
if self.timeout is None:
return False
return monotonic() - self.start_time > self.timeout | python | def expired(self):
if self.timeout is None:
return False
return monotonic() - self.start_time > self.timeout | [
"def",
"expired",
"(",
"self",
")",
":",
"if",
"self",
".",
"timeout",
"is",
"None",
":",
"return",
"False",
"return",
"monotonic",
"(",
")",
"-",
"self",
".",
"start_time",
">",
"self",
".",
"timeout"
] | Boolean property if this action has expired | [
"Boolean",
"property",
"if",
"this",
"action",
"has",
"expired"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/native_ble/iotile_transport_native_ble/connection_manager.py#L37-L43 |
23,602 | iotile/coretools | transport_plugins/native_ble/iotile_transport_native_ble/connection_manager.py | ConnectionManager.begin_connection | def begin_connection(self, connection_id, internal_id, callback, context, timeout):
"""Asynchronously begin a connection attempt
Args:
connection_id (int): The external connection id
internal_id (string): An internal identifier for the connection
callback (callable): The function to be called when the connection
attempt finishes
context (dict): Additional information to associate with this context
timeout (float): How long to allow this connection attempt to proceed
without timing it out
"""
data = {
'callback': callback,
'connection_id': connection_id,
'internal_id': internal_id,
'context': context
}
action = ConnectionAction('begin_connection', data, timeout=timeout, sync=False)
self._actions.put(action) | python | def begin_connection(self, connection_id, internal_id, callback, context, timeout):
data = {
'callback': callback,
'connection_id': connection_id,
'internal_id': internal_id,
'context': context
}
action = ConnectionAction('begin_connection', data, timeout=timeout, sync=False)
self._actions.put(action) | [
"def",
"begin_connection",
"(",
"self",
",",
"connection_id",
",",
"internal_id",
",",
"callback",
",",
"context",
",",
"timeout",
")",
":",
"data",
"=",
"{",
"'callback'",
":",
"callback",
",",
"'connection_id'",
":",
"connection_id",
",",
"'internal_id'",
":... | Asynchronously begin a connection attempt
Args:
connection_id (int): The external connection id
internal_id (string): An internal identifier for the connection
callback (callable): The function to be called when the connection
attempt finishes
context (dict): Additional information to associate with this context
timeout (float): How long to allow this connection attempt to proceed
without timing it out | [
"Asynchronously",
"begin",
"a",
"connection",
"attempt"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/native_ble/iotile_transport_native_ble/connection_manager.py#L337-L358 |
23,603 | iotile/coretools | transport_plugins/native_ble/iotile_transport_native_ble/connection_manager.py | ConnectionManager.begin_operation | def begin_operation(self, conn_or_internal_id, op_name, callback, timeout):
"""Begin an operation on a connection
Args:
conn_or_internal_id (string, int): Either an integer connection id or a string
internal_id
op_name (string): The name of the operation that we are starting (stored in
the connection's microstate)
callback (callable): Callback to call when this disconnection attempt either
succeeds or fails
timeout (float): How long to allow this connection attempt to proceed
without timing it out (in seconds)
"""
data = {
'id': conn_or_internal_id,
'callback': callback,
'operation_name': op_name
}
action = ConnectionAction('begin_operation', data, timeout=timeout, sync=False)
self._actions.put(action) | python | def begin_operation(self, conn_or_internal_id, op_name, callback, timeout):
data = {
'id': conn_or_internal_id,
'callback': callback,
'operation_name': op_name
}
action = ConnectionAction('begin_operation', data, timeout=timeout, sync=False)
self._actions.put(action) | [
"def",
"begin_operation",
"(",
"self",
",",
"conn_or_internal_id",
",",
"op_name",
",",
"callback",
",",
"timeout",
")",
":",
"data",
"=",
"{",
"'id'",
":",
"conn_or_internal_id",
",",
"'callback'",
":",
"callback",
",",
"'operation_name'",
":",
"op_name",
"}"... | Begin an operation on a connection
Args:
conn_or_internal_id (string, int): Either an integer connection id or a string
internal_id
op_name (string): The name of the operation that we are starting (stored in
the connection's microstate)
callback (callable): Callback to call when this disconnection attempt either
succeeds or fails
timeout (float): How long to allow this connection attempt to proceed
without timing it out (in seconds) | [
"Begin",
"an",
"operation",
"on",
"a",
"connection"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/native_ble/iotile_transport_native_ble/connection_manager.py#L611-L632 |
23,604 | iotile/coretools | transport_plugins/native_ble/iotile_transport_native_ble/connection_manager.py | ConnectionManager._begin_operation_action | def _begin_operation_action(self, action):
"""Begin an attempted operation.
Args:
action (ConnectionAction): the action object describing what we are
operating on
"""
conn_key = action.data['id']
callback = action.data['callback']
if self._get_connection_state(conn_key) != self.Idle:
callback(conn_key, self.id, False, 'Cannot start operation, connection is not idle')
return
data = self._get_connection(conn_key)
data['state'] = self.InProgress
data['microstate'] = action.data['operation_name']
data['action'] = action | python | def _begin_operation_action(self, action):
conn_key = action.data['id']
callback = action.data['callback']
if self._get_connection_state(conn_key) != self.Idle:
callback(conn_key, self.id, False, 'Cannot start operation, connection is not idle')
return
data = self._get_connection(conn_key)
data['state'] = self.InProgress
data['microstate'] = action.data['operation_name']
data['action'] = action | [
"def",
"_begin_operation_action",
"(",
"self",
",",
"action",
")",
":",
"conn_key",
"=",
"action",
".",
"data",
"[",
"'id'",
"]",
"callback",
"=",
"action",
".",
"data",
"[",
"'callback'",
"]",
"if",
"self",
".",
"_get_connection_state",
"(",
"conn_key",
"... | Begin an attempted operation.
Args:
action (ConnectionAction): the action object describing what we are
operating on | [
"Begin",
"an",
"attempted",
"operation",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/native_ble/iotile_transport_native_ble/connection_manager.py#L634-L652 |
23,605 | iotile/coretools | transport_plugins/websocket/iotile_transport_websocket/generic/async_client.py | AsyncValidatingWSClient.allow_exception | def allow_exception(self, exc_class):
"""Allow raising this class of exceptions from commands.
When a command fails on the server side due to an exception, by
default it is turned into a string and raised on the client side as an
ExternalError. The original class name is sent but ignored. If you
would like to instead raise an instance of the same exception on the
client side, you can pass the exception class object to this method
and instances of that exception will be reraised.
The caveat is that the exception must be creatable with a single
string parameter and it should have a ``msg`` property.
Args:
exc_class (class): A class object with the exception that
we should allow to pass from server to client.
"""
name = exc_class.__name__
self._allowed_exceptions[name] = exc_class | python | def allow_exception(self, exc_class):
name = exc_class.__name__
self._allowed_exceptions[name] = exc_class | [
"def",
"allow_exception",
"(",
"self",
",",
"exc_class",
")",
":",
"name",
"=",
"exc_class",
".",
"__name__",
"self",
".",
"_allowed_exceptions",
"[",
"name",
"]",
"=",
"exc_class"
] | Allow raising this class of exceptions from commands.
When a command fails on the server side due to an exception, by
default it is turned into a string and raised on the client side as an
ExternalError. The original class name is sent but ignored. If you
would like to instead raise an instance of the same exception on the
client side, you can pass the exception class object to this method
and instances of that exception will be reraised.
The caveat is that the exception must be creatable with a single
string parameter and it should have a ``msg`` property.
Args:
exc_class (class): A class object with the exception that
we should allow to pass from server to client. | [
"Allow",
"raising",
"this",
"class",
"of",
"exceptions",
"from",
"commands",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/websocket/iotile_transport_websocket/generic/async_client.py#L53-L72 |
23,606 | iotile/coretools | transport_plugins/websocket/iotile_transport_websocket/generic/async_client.py | AsyncValidatingWSClient.start | async def start(self, name="websocket_client"):
"""Connect to the websocket server.
This method will spawn a background task in the designated event loop
that will run until stop() is called. You can control the name of the
background task for debugging purposes using the name parameter. The
name is not used in anyway except for debug logging statements.
Args:
name (str): Optional name for the background task.
"""
self._con = await websockets.connect(self.url)
self._connection_task = self._loop.add_task(self._manage_connection(), name=name) | python | async def start(self, name="websocket_client"):
self._con = await websockets.connect(self.url)
self._connection_task = self._loop.add_task(self._manage_connection(), name=name) | [
"async",
"def",
"start",
"(",
"self",
",",
"name",
"=",
"\"websocket_client\"",
")",
":",
"self",
".",
"_con",
"=",
"await",
"websockets",
".",
"connect",
"(",
"self",
".",
"url",
")",
"self",
".",
"_connection_task",
"=",
"self",
".",
"_loop",
".",
"a... | Connect to the websocket server.
This method will spawn a background task in the designated event loop
that will run until stop() is called. You can control the name of the
background task for debugging purposes using the name parameter. The
name is not used in anyway except for debug logging statements.
Args:
name (str): Optional name for the background task. | [
"Connect",
"to",
"the",
"websocket",
"server",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/websocket/iotile_transport_websocket/generic/async_client.py#L74-L87 |
23,607 | iotile/coretools | transport_plugins/websocket/iotile_transport_websocket/generic/async_client.py | AsyncValidatingWSClient.stop | async def stop(self):
"""Stop this websocket client and disconnect from the server.
This method is idempotent and may be called multiple times. If called
when there is no active connection, it will simply return.
"""
if self._connection_task is None:
return
try:
await self._connection_task.stop()
finally:
self._con = None
self._connection_task = None
self._manager.clear() | python | async def stop(self):
if self._connection_task is None:
return
try:
await self._connection_task.stop()
finally:
self._con = None
self._connection_task = None
self._manager.clear() | [
"async",
"def",
"stop",
"(",
"self",
")",
":",
"if",
"self",
".",
"_connection_task",
"is",
"None",
":",
"return",
"try",
":",
"await",
"self",
".",
"_connection_task",
".",
"stop",
"(",
")",
"finally",
":",
"self",
".",
"_con",
"=",
"None",
"self",
... | Stop this websocket client and disconnect from the server.
This method is idempotent and may be called multiple times. If called
when there is no active connection, it will simply return. | [
"Stop",
"this",
"websocket",
"client",
"and",
"disconnect",
"from",
"the",
"server",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/websocket/iotile_transport_websocket/generic/async_client.py#L89-L104 |
23,608 | iotile/coretools | transport_plugins/websocket/iotile_transport_websocket/generic/async_client.py | AsyncValidatingWSClient.send_command | async def send_command(self, command, args, validator, timeout=10.0):
"""Send a command and synchronously wait for a single response.
Args:
command (string): The command name
args (dict): Optional arguments.
validator (Verifier): A SchemaVerifier to verify the response
payload.
timeout (float): The maximum time to wait for a response.
Defaults to 10 seconds.
Returns:
dict: The response payload
Raises:
ExternalError: If the server is not connected or the command
fails.
asyncio.TimeoutError: If the command times out.
ValidationError: If the response payload does not match the
given validator.
"""
if self._con is None:
raise ExternalError("No websock connection established")
cmd_uuid = str(uuid.uuid4())
msg = dict(type='command', operation=command, uuid=cmd_uuid,
payload=args)
packed = pack(msg)
# Note: register future before sending to avoid race conditions
response_future = self._manager.wait_for(type="response", uuid=cmd_uuid,
timeout=timeout)
await self._con.send(packed)
response = await response_future
if response.get('success') is False:
self._raise_error(command, response)
if validator is None:
return response.get('payload')
return validator.verify(response.get('payload')) | python | async def send_command(self, command, args, validator, timeout=10.0):
if self._con is None:
raise ExternalError("No websock connection established")
cmd_uuid = str(uuid.uuid4())
msg = dict(type='command', operation=command, uuid=cmd_uuid,
payload=args)
packed = pack(msg)
# Note: register future before sending to avoid race conditions
response_future = self._manager.wait_for(type="response", uuid=cmd_uuid,
timeout=timeout)
await self._con.send(packed)
response = await response_future
if response.get('success') is False:
self._raise_error(command, response)
if validator is None:
return response.get('payload')
return validator.verify(response.get('payload')) | [
"async",
"def",
"send_command",
"(",
"self",
",",
"command",
",",
"args",
",",
"validator",
",",
"timeout",
"=",
"10.0",
")",
":",
"if",
"self",
".",
"_con",
"is",
"None",
":",
"raise",
"ExternalError",
"(",
"\"No websock connection established\"",
")",
"cmd... | Send a command and synchronously wait for a single response.
Args:
command (string): The command name
args (dict): Optional arguments.
validator (Verifier): A SchemaVerifier to verify the response
payload.
timeout (float): The maximum time to wait for a response.
Defaults to 10 seconds.
Returns:
dict: The response payload
Raises:
ExternalError: If the server is not connected or the command
fails.
asyncio.TimeoutError: If the command times out.
ValidationError: If the response payload does not match the
given validator. | [
"Send",
"a",
"command",
"and",
"synchronously",
"wait",
"for",
"a",
"single",
"response",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/websocket/iotile_transport_websocket/generic/async_client.py#L106-L151 |
23,609 | iotile/coretools | transport_plugins/websocket/iotile_transport_websocket/generic/async_client.py | AsyncValidatingWSClient._manage_connection | async def _manage_connection(self):
"""Internal coroutine for managing the client connection."""
try:
while True:
message = await self._con.recv()
try:
unpacked = unpack(message)
except Exception: # pylint:disable=broad-except;This is a background worker
self._logger.exception("Corrupt message received")
continue
if not VALID_SERVER_MESSAGE.matches(unpacked):
self._logger.warning("Dropping invalid message from server: %s", unpacked)
continue
# Don't block until all callbacks have finished since once of
# those callbacks may call self.send_command, which would deadlock
# since it couldn't get the response until it had already finished.
if not await self._manager.process_message(unpacked, wait=False):
self._logger.warning("No handler found for received message, message=%s", unpacked)
except asyncio.CancelledError:
self._logger.info("Closing connection to server due to stop()")
finally:
await self._manager.process_message(dict(type='event', name=self.DISCONNECT_EVENT, payload=None))
await self._con.close() | python | async def _manage_connection(self):
try:
while True:
message = await self._con.recv()
try:
unpacked = unpack(message)
except Exception: # pylint:disable=broad-except;This is a background worker
self._logger.exception("Corrupt message received")
continue
if not VALID_SERVER_MESSAGE.matches(unpacked):
self._logger.warning("Dropping invalid message from server: %s", unpacked)
continue
# Don't block until all callbacks have finished since once of
# those callbacks may call self.send_command, which would deadlock
# since it couldn't get the response until it had already finished.
if not await self._manager.process_message(unpacked, wait=False):
self._logger.warning("No handler found for received message, message=%s", unpacked)
except asyncio.CancelledError:
self._logger.info("Closing connection to server due to stop()")
finally:
await self._manager.process_message(dict(type='event', name=self.DISCONNECT_EVENT, payload=None))
await self._con.close() | [
"async",
"def",
"_manage_connection",
"(",
"self",
")",
":",
"try",
":",
"while",
"True",
":",
"message",
"=",
"await",
"self",
".",
"_con",
".",
"recv",
"(",
")",
"try",
":",
"unpacked",
"=",
"unpack",
"(",
"message",
")",
"except",
"Exception",
":",
... | Internal coroutine for managing the client connection. | [
"Internal",
"coroutine",
"for",
"managing",
"the",
"client",
"connection",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/websocket/iotile_transport_websocket/generic/async_client.py#L164-L190 |
23,610 | iotile/coretools | transport_plugins/websocket/iotile_transport_websocket/generic/async_client.py | AsyncValidatingWSClient.register_event | def register_event(self, name, callback, validator):
"""Register a callback to receive events.
Every event with the matching name will have its payload validated
using validator and then will be passed to callback if validation
succeeds.
Callback must be a normal callback function, coroutines are not
allowed. If you need to run a coroutine you are free to schedule it
from your callback.
Args:
name (str): The name of the event that we are listening
for
callback (callable): The function that should be called
when a message that matches validator is received.
validator (Verifier): A schema verifier that will
validate a received message uniquely
"""
async def _validate_and_call(message):
payload = message.get('payload')
try:
payload = validator.verify(payload)
except ValidationError:
self._logger.warning("Dropping invalid payload for event %s, payload=%s",
name, payload)
return
try:
result = callback(payload)
if inspect.isawaitable(result):
await result
except: # pylint:disable=bare-except;This is a background logging routine
self._logger.error("Error calling callback for event %s, payload=%s",
name, payload, exc_info=True)
self._manager.every_match(_validate_and_call, type="event", name=name) | python | def register_event(self, name, callback, validator):
async def _validate_and_call(message):
payload = message.get('payload')
try:
payload = validator.verify(payload)
except ValidationError:
self._logger.warning("Dropping invalid payload for event %s, payload=%s",
name, payload)
return
try:
result = callback(payload)
if inspect.isawaitable(result):
await result
except: # pylint:disable=bare-except;This is a background logging routine
self._logger.error("Error calling callback for event %s, payload=%s",
name, payload, exc_info=True)
self._manager.every_match(_validate_and_call, type="event", name=name) | [
"def",
"register_event",
"(",
"self",
",",
"name",
",",
"callback",
",",
"validator",
")",
":",
"async",
"def",
"_validate_and_call",
"(",
"message",
")",
":",
"payload",
"=",
"message",
".",
"get",
"(",
"'payload'",
")",
"try",
":",
"payload",
"=",
"val... | Register a callback to receive events.
Every event with the matching name will have its payload validated
using validator and then will be passed to callback if validation
succeeds.
Callback must be a normal callback function, coroutines are not
allowed. If you need to run a coroutine you are free to schedule it
from your callback.
Args:
name (str): The name of the event that we are listening
for
callback (callable): The function that should be called
when a message that matches validator is received.
validator (Verifier): A schema verifier that will
validate a received message uniquely | [
"Register",
"a",
"callback",
"to",
"receive",
"events",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/websocket/iotile_transport_websocket/generic/async_client.py#L192-L230 |
23,611 | iotile/coretools | transport_plugins/websocket/iotile_transport_websocket/generic/async_client.py | AsyncValidatingWSClient.post_command | def post_command(self, command, args):
"""Post a command asynchronously and don't wait for a response.
There is no notification of any error that could happen during
command execution. A log message will be generated if an error
occurred. The command's response is discarded.
This method is thread-safe and may be called from inside or ouside
of the background event loop. If there is no websockets connection,
no error will be raised (though an error will be logged).
Args:
command (string): The command name
args (dict): Optional arguments
"""
self._loop.log_coroutine(self.send_command(command, args, Verifier())) | python | def post_command(self, command, args):
self._loop.log_coroutine(self.send_command(command, args, Verifier())) | [
"def",
"post_command",
"(",
"self",
",",
"command",
",",
"args",
")",
":",
"self",
".",
"_loop",
".",
"log_coroutine",
"(",
"self",
".",
"send_command",
"(",
"command",
",",
"args",
",",
"Verifier",
"(",
")",
")",
")"
] | Post a command asynchronously and don't wait for a response.
There is no notification of any error that could happen during
command execution. A log message will be generated if an error
occurred. The command's response is discarded.
This method is thread-safe and may be called from inside or ouside
of the background event loop. If there is no websockets connection,
no error will be raised (though an error will be logged).
Args:
command (string): The command name
args (dict): Optional arguments | [
"Post",
"a",
"command",
"asynchronously",
"and",
"don",
"t",
"wait",
"for",
"a",
"response",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/websocket/iotile_transport_websocket/generic/async_client.py#L232-L248 |
23,612 | iotile/coretools | iotilesensorgraph/iotile/sg/processors.py | copy_all_a | def copy_all_a(input_a, *other_inputs, **kwargs):
"""Copy all readings in input a into the output.
All other inputs are skipped so that after this function runs there are no
readings left in any of the input walkers when the function finishes, even
if it generated no output readings.
Returns:
list(IOTileReading)
"""
output = []
while input_a.count() > 0:
output.append(input_a.pop())
for input_x in other_inputs:
input_x.skip_all()
return output | python | def copy_all_a(input_a, *other_inputs, **kwargs):
output = []
while input_a.count() > 0:
output.append(input_a.pop())
for input_x in other_inputs:
input_x.skip_all()
return output | [
"def",
"copy_all_a",
"(",
"input_a",
",",
"*",
"other_inputs",
",",
"*",
"*",
"kwargs",
")",
":",
"output",
"=",
"[",
"]",
"while",
"input_a",
".",
"count",
"(",
")",
">",
"0",
":",
"output",
".",
"append",
"(",
"input_a",
".",
"pop",
"(",
")",
"... | Copy all readings in input a into the output.
All other inputs are skipped so that after this function runs there are no
readings left in any of the input walkers when the function finishes, even
if it generated no output readings.
Returns:
list(IOTileReading) | [
"Copy",
"all",
"readings",
"in",
"input",
"a",
"into",
"the",
"output",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/processors.py#L13-L31 |
23,613 | iotile/coretools | iotilesensorgraph/iotile/sg/processors.py | copy_count_a | def copy_count_a(input_a, *other_inputs, **kwargs):
"""Copy the latest reading from input a into the output.
All other inputs are skipped to that after this function
runs there are no readings left in any of the input walkers
even if no output is generated.
Returns:
list(IOTileReading)
"""
count = input_a.count()
input_a.skip_all();
for input_x in other_inputs:
input_x.skip_all()
return [IOTileReading(0, 0, count)] | python | def copy_count_a(input_a, *other_inputs, **kwargs):
count = input_a.count()
input_a.skip_all();
for input_x in other_inputs:
input_x.skip_all()
return [IOTileReading(0, 0, count)] | [
"def",
"copy_count_a",
"(",
"input_a",
",",
"*",
"other_inputs",
",",
"*",
"*",
"kwargs",
")",
":",
"count",
"=",
"input_a",
".",
"count",
"(",
")",
"input_a",
".",
"skip_all",
"(",
")",
"for",
"input_x",
"in",
"other_inputs",
":",
"input_x",
".",
"ski... | Copy the latest reading from input a into the output.
All other inputs are skipped to that after this function
runs there are no readings left in any of the input walkers
even if no output is generated.
Returns:
list(IOTileReading) | [
"Copy",
"the",
"latest",
"reading",
"from",
"input",
"a",
"into",
"the",
"output",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/processors.py#L63-L81 |
23,614 | iotile/coretools | iotilesensorgraph/iotile/sg/processors.py | call_rpc | def call_rpc(*inputs, **kwargs):
"""Call an RPC based on the encoded value read from input b.
The response of the RPC must be a 4 byte value that is used as
the output of this call. The encoded RPC must be a 32 bit value
encoded as "BBH":
B: ignored, should be 0
B: the address of the tile that we should call
H: The id of the RPC to call
All other readings are then skipped so that there are no
readings in any input queue when this function returns
Returns:
list(IOTileReading)
"""
rpc_executor = kwargs['rpc_executor']
output = []
try:
value = inputs[1].pop()
addr = value.value >> 16
rpc_id = value.value & 0xFFFF
reading_value = rpc_executor.rpc(addr, rpc_id)
output.append(IOTileReading(0, 0, reading_value))
except (HardwareError, StreamEmptyError):
pass
for input_x in inputs:
input_x.skip_all()
return output | python | def call_rpc(*inputs, **kwargs):
rpc_executor = kwargs['rpc_executor']
output = []
try:
value = inputs[1].pop()
addr = value.value >> 16
rpc_id = value.value & 0xFFFF
reading_value = rpc_executor.rpc(addr, rpc_id)
output.append(IOTileReading(0, 0, reading_value))
except (HardwareError, StreamEmptyError):
pass
for input_x in inputs:
input_x.skip_all()
return output | [
"def",
"call_rpc",
"(",
"*",
"inputs",
",",
"*",
"*",
"kwargs",
")",
":",
"rpc_executor",
"=",
"kwargs",
"[",
"'rpc_executor'",
"]",
"output",
"=",
"[",
"]",
"try",
":",
"value",
"=",
"inputs",
"[",
"1",
"]",
".",
"pop",
"(",
")",
"addr",
"=",
"v... | Call an RPC based on the encoded value read from input b.
The response of the RPC must be a 4 byte value that is used as
the output of this call. The encoded RPC must be a 32 bit value
encoded as "BBH":
B: ignored, should be 0
B: the address of the tile that we should call
H: The id of the RPC to call
All other readings are then skipped so that there are no
readings in any input queue when this function returns
Returns:
list(IOTileReading) | [
"Call",
"an",
"RPC",
"based",
"on",
"the",
"encoded",
"value",
"read",
"from",
"input",
"b",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/processors.py#L84-L118 |
23,615 | iotile/coretools | iotilesensorgraph/iotile/sg/processors.py | trigger_streamer | def trigger_streamer(*inputs, **kwargs):
"""Trigger a streamer based on the index read from input b.
Returns:
list(IOTileReading)
"""
streamer_marker = kwargs['mark_streamer']
try:
reading = inputs[1].pop()
except StreamEmptyError:
return []
finally:
for input_x in inputs:
input_x.skip_all()
try:
streamer_marker(reading.value)
except ArgumentError:
return []
return [IOTileReading(0, 0, 0)] | python | def trigger_streamer(*inputs, **kwargs):
streamer_marker = kwargs['mark_streamer']
try:
reading = inputs[1].pop()
except StreamEmptyError:
return []
finally:
for input_x in inputs:
input_x.skip_all()
try:
streamer_marker(reading.value)
except ArgumentError:
return []
return [IOTileReading(0, 0, 0)] | [
"def",
"trigger_streamer",
"(",
"*",
"inputs",
",",
"*",
"*",
"kwargs",
")",
":",
"streamer_marker",
"=",
"kwargs",
"[",
"'mark_streamer'",
"]",
"try",
":",
"reading",
"=",
"inputs",
"[",
"1",
"]",
".",
"pop",
"(",
")",
"except",
"StreamEmptyError",
":",... | Trigger a streamer based on the index read from input b.
Returns:
list(IOTileReading) | [
"Trigger",
"a",
"streamer",
"based",
"on",
"the",
"index",
"read",
"from",
"input",
"b",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/processors.py#L121-L143 |
23,616 | iotile/coretools | iotilesensorgraph/iotile/sg/processors.py | subtract_afromb | def subtract_afromb(*inputs, **kwargs):
"""Subtract stream a from stream b.
Returns:
list(IOTileReading)
"""
try:
value_a = inputs[0].pop()
value_b = inputs[1].pop()
return [IOTileReading(0, 0, value_b.value - value_a.value)]
except StreamEmptyError:
return [] | python | def subtract_afromb(*inputs, **kwargs):
try:
value_a = inputs[0].pop()
value_b = inputs[1].pop()
return [IOTileReading(0, 0, value_b.value - value_a.value)]
except StreamEmptyError:
return [] | [
"def",
"subtract_afromb",
"(",
"*",
"inputs",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"value_a",
"=",
"inputs",
"[",
"0",
"]",
".",
"pop",
"(",
")",
"value_b",
"=",
"inputs",
"[",
"1",
"]",
".",
"pop",
"(",
")",
"return",
"[",
"IOTileReadi... | Subtract stream a from stream b.
Returns:
list(IOTileReading) | [
"Subtract",
"stream",
"a",
"from",
"stream",
"b",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/processors.py#L146-L159 |
23,617 | iotile/coretools | iotileemulate/iotile/emulate/virtual/emulation_mixin.py | _clean_intenum | def _clean_intenum(obj):
"""Remove all IntEnum classes from a map."""
if isinstance(obj, dict):
for key, value in obj.items():
if isinstance(value, IntEnum):
obj[key] = value.value
elif isinstance(value, (dict, list)):
obj[key] = _clean_intenum(value)
elif isinstance(obj, list):
for i, value in enumerate(obj):
if isinstance(value, IntEnum):
obj[i] = value.value
elif isinstance(value, (dict, list)):
obj[i] = _clean_intenum(value)
return obj | python | def _clean_intenum(obj):
if isinstance(obj, dict):
for key, value in obj.items():
if isinstance(value, IntEnum):
obj[key] = value.value
elif isinstance(value, (dict, list)):
obj[key] = _clean_intenum(value)
elif isinstance(obj, list):
for i, value in enumerate(obj):
if isinstance(value, IntEnum):
obj[i] = value.value
elif isinstance(value, (dict, list)):
obj[i] = _clean_intenum(value)
return obj | [
"def",
"_clean_intenum",
"(",
"obj",
")",
":",
"if",
"isinstance",
"(",
"obj",
",",
"dict",
")",
":",
"for",
"key",
",",
"value",
"in",
"obj",
".",
"items",
"(",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"IntEnum",
")",
":",
"obj",
"[",
"k... | Remove all IntEnum classes from a map. | [
"Remove",
"all",
"IntEnum",
"classes",
"from",
"a",
"map",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/virtual/emulation_mixin.py#L177-L193 |
23,618 | iotile/coretools | iotileemulate/iotile/emulate/virtual/emulation_mixin.py | EmulationMixin._track_change | def _track_change(self, name, value, formatter=None):
"""Track that a change happened.
This function is only needed for manually recording changes that are
not captured by changes to properties of this object that are tracked
automatically. Classes that inherit from `emulation_mixin` should
use this function to record interesting changes in their internal
state or events that happen.
The `value` parameter that you pass here should be a native python
object best representing what the value of the property that changed
is. When saved to disk, it will be converted to a string using:
`str(value)`. If you do not like the string that would result from
such a call, you can pass a custom formatter that will be called as
`formatter(value)` and must return a string.
Args:
name (str): The name of the property that changed.
value (object): The new value of the property.
formatter (callable): Optional function to convert value to a
string. This function will only be called if track_changes()
is enabled and `name` is on the whitelist for properties that
should be tracked. If `formatter` is not passed or is None,
it will default to `str`
"""
self._emulation_log.track_change(self._emulation_address, name, value, formatter) | python | def _track_change(self, name, value, formatter=None):
self._emulation_log.track_change(self._emulation_address, name, value, formatter) | [
"def",
"_track_change",
"(",
"self",
",",
"name",
",",
"value",
",",
"formatter",
"=",
"None",
")",
":",
"self",
".",
"_emulation_log",
".",
"track_change",
"(",
"self",
".",
"_emulation_address",
",",
"name",
",",
"value",
",",
"formatter",
")"
] | Track that a change happened.
This function is only needed for manually recording changes that are
not captured by changes to properties of this object that are tracked
automatically. Classes that inherit from `emulation_mixin` should
use this function to record interesting changes in their internal
state or events that happen.
The `value` parameter that you pass here should be a native python
object best representing what the value of the property that changed
is. When saved to disk, it will be converted to a string using:
`str(value)`. If you do not like the string that would result from
such a call, you can pass a custom formatter that will be called as
`formatter(value)` and must return a string.
Args:
name (str): The name of the property that changed.
value (object): The new value of the property.
formatter (callable): Optional function to convert value to a
string. This function will only be called if track_changes()
is enabled and `name` is on the whitelist for properties that
should be tracked. If `formatter` is not passed or is None,
it will default to `str` | [
"Track",
"that",
"a",
"change",
"happened",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/virtual/emulation_mixin.py#L50-L76 |
23,619 | iotile/coretools | iotileemulate/iotile/emulate/virtual/emulation_mixin.py | EmulationMixin.save_state | def save_state(self, out_path):
"""Save the current state of this emulated object to a file.
Args:
out_path (str): The path to save the dumped state of this emulated
object.
"""
state = self.dump_state()
# Remove all IntEnums from state since they cannot be json-serialized on python 2.7
# See https://bitbucket.org/stoneleaf/enum34/issues/17/difference-between-enum34-and-enum-json
state = _clean_intenum(state)
with open(out_path, "w") as outfile:
json.dump(state, outfile, indent=4) | python | def save_state(self, out_path):
state = self.dump_state()
# Remove all IntEnums from state since they cannot be json-serialized on python 2.7
# See https://bitbucket.org/stoneleaf/enum34/issues/17/difference-between-enum34-and-enum-json
state = _clean_intenum(state)
with open(out_path, "w") as outfile:
json.dump(state, outfile, indent=4) | [
"def",
"save_state",
"(",
"self",
",",
"out_path",
")",
":",
"state",
"=",
"self",
".",
"dump_state",
"(",
")",
"# Remove all IntEnums from state since they cannot be json-serialized on python 2.7",
"# See https://bitbucket.org/stoneleaf/enum34/issues/17/difference-between-enum34-and... | Save the current state of this emulated object to a file.
Args:
out_path (str): The path to save the dumped state of this emulated
object. | [
"Save",
"the",
"current",
"state",
"of",
"this",
"emulated",
"object",
"to",
"a",
"file",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/virtual/emulation_mixin.py#L96-L111 |
23,620 | iotile/coretools | iotileemulate/iotile/emulate/virtual/emulation_mixin.py | EmulationMixin.load_state | def load_state(self, in_path):
"""Load the current state of this emulated object from a file.
The file should have been produced by a previous call to save_state.
Args:
in_path (str): The path to the saved state dump that you wish
to load.
"""
with open(in_path, "r") as infile:
state = json.load(infile)
self.restore_state(state) | python | def load_state(self, in_path):
with open(in_path, "r") as infile:
state = json.load(infile)
self.restore_state(state) | [
"def",
"load_state",
"(",
"self",
",",
"in_path",
")",
":",
"with",
"open",
"(",
"in_path",
",",
"\"r\"",
")",
"as",
"infile",
":",
"state",
"=",
"json",
".",
"load",
"(",
"infile",
")",
"self",
".",
"restore_state",
"(",
"state",
")"
] | Load the current state of this emulated object from a file.
The file should have been produced by a previous call to save_state.
Args:
in_path (str): The path to the saved state dump that you wish
to load. | [
"Load",
"the",
"current",
"state",
"of",
"this",
"emulated",
"object",
"from",
"a",
"file",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/virtual/emulation_mixin.py#L113-L126 |
23,621 | iotile/coretools | iotileemulate/iotile/emulate/virtual/emulation_mixin.py | EmulationMixin.load_scenario | def load_scenario(self, scenario_name, **kwargs):
"""Load a scenario into the emulated object.
Scenarios are specific states of an an object that can be customized
with keyword parameters. Typical examples are:
- data logger with full storage
- device with low battery indication on
Args:
scenario_name (str): The name of the scenario that we wish to
load.
**kwargs: Any arguments that should be passed to configure
the scenario. These arguments will be passed directly
to the scenario handler.
"""
scenario = self._known_scenarios.get(scenario_name)
if scenario is None:
raise ArgumentError("Unknown scenario %s" % scenario_name, known_scenarios=list(self._known_scenarios))
scenario(**kwargs) | python | def load_scenario(self, scenario_name, **kwargs):
scenario = self._known_scenarios.get(scenario_name)
if scenario is None:
raise ArgumentError("Unknown scenario %s" % scenario_name, known_scenarios=list(self._known_scenarios))
scenario(**kwargs) | [
"def",
"load_scenario",
"(",
"self",
",",
"scenario_name",
",",
"*",
"*",
"kwargs",
")",
":",
"scenario",
"=",
"self",
".",
"_known_scenarios",
".",
"get",
"(",
"scenario_name",
")",
"if",
"scenario",
"is",
"None",
":",
"raise",
"ArgumentError",
"(",
"\"Un... | Load a scenario into the emulated object.
Scenarios are specific states of an an object that can be customized
with keyword parameters. Typical examples are:
- data logger with full storage
- device with low battery indication on
Args:
scenario_name (str): The name of the scenario that we wish to
load.
**kwargs: Any arguments that should be passed to configure
the scenario. These arguments will be passed directly
to the scenario handler. | [
"Load",
"a",
"scenario",
"into",
"the",
"emulated",
"object",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/virtual/emulation_mixin.py#L128-L149 |
23,622 | iotile/coretools | iotileemulate/iotile/emulate/virtual/emulation_mixin.py | EmulationMixin.register_scenario | def register_scenario(self, scenario_name, handler):
"""Register a scenario handler for this object.
Scenario handlers are callable functions with no positional arguments
that can be called by name with the load_scenario function and should
prepare the emulated object into a known state. The purpose of a
scenario is to make it easy to get a device into a specific state for
testing purposes that may otherwise be difficult or time consuming to
prepare on the physical, non-emulated device.
Args:
scenario_name (str): The name of this scenario that can be passed to
load_scenario later in order to invoke the scenario.
handler (callable): A callable function that takes no positional
arguments and can prepare this object into the given scenario
state. It may take required or optional keyword arguments that
may be passed to `load_scenario` if needed.
"""
if scenario_name in self._known_scenarios:
raise ArgumentError("Attempted to add the same scenario name twice", scenario_name=scenario_name,
previous_handler=self._known_scenarios[scenario_name])
self._known_scenarios[scenario_name] = handler | python | def register_scenario(self, scenario_name, handler):
if scenario_name in self._known_scenarios:
raise ArgumentError("Attempted to add the same scenario name twice", scenario_name=scenario_name,
previous_handler=self._known_scenarios[scenario_name])
self._known_scenarios[scenario_name] = handler | [
"def",
"register_scenario",
"(",
"self",
",",
"scenario_name",
",",
"handler",
")",
":",
"if",
"scenario_name",
"in",
"self",
".",
"_known_scenarios",
":",
"raise",
"ArgumentError",
"(",
"\"Attempted to add the same scenario name twice\"",
",",
"scenario_name",
"=",
"... | Register a scenario handler for this object.
Scenario handlers are callable functions with no positional arguments
that can be called by name with the load_scenario function and should
prepare the emulated object into a known state. The purpose of a
scenario is to make it easy to get a device into a specific state for
testing purposes that may otherwise be difficult or time consuming to
prepare on the physical, non-emulated device.
Args:
scenario_name (str): The name of this scenario that can be passed to
load_scenario later in order to invoke the scenario.
handler (callable): A callable function that takes no positional
arguments and can prepare this object into the given scenario
state. It may take required or optional keyword arguments that
may be passed to `load_scenario` if needed. | [
"Register",
"a",
"scenario",
"handler",
"for",
"this",
"object",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/virtual/emulation_mixin.py#L151-L174 |
23,623 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/hpcc.py | generate | def generate(env):
"""Add Builders and construction variables for aCC & cc to an Environment."""
cc.generate(env)
env['CXX'] = 'aCC'
env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS +Z') | python | def generate(env):
cc.generate(env)
env['CXX'] = 'aCC'
env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS +Z') | [
"def",
"generate",
"(",
"env",
")",
":",
"cc",
".",
"generate",
"(",
"env",
")",
"env",
"[",
"'CXX'",
"]",
"=",
"'aCC'",
"env",
"[",
"'SHCCFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"'$CCFLAGS +Z'",
")"
] | Add Builders and construction variables for aCC & cc to an Environment. | [
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"aCC",
"&",
"cc",
"to",
"an",
"Environment",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/hpcc.py#L39-L44 |
23,624 | iotile/coretools | iotilesensorgraph/iotile/sg/optimizer/optimizer.py | SensorGraphOptimizer.add_pass | def add_pass(self, name, opt_pass, before=None, after=None):
"""Add an optimization pass to the optimizer.
Optimization passes have a name that allows them
to be enabled or disabled by name. By default all
optimization passed are enabled and unordered. You can
explicitly specify passes by name that this pass must run
before or after this passs so that they can be properly
ordered.
Args:
name (str): The name of the optimization pass to allow for
enabling/disabling it by name
opt_pass (OptimizationPass): The optimization pass class itself
before (list(str)): A list of the passes that this pass should
run before.
after (list(str)): A list of the passes that this pass should
run after.
"""
if before is None:
before = []
if after is None:
after = []
self._known_passes[name] = (opt_pass, before, after) | python | def add_pass(self, name, opt_pass, before=None, after=None):
if before is None:
before = []
if after is None:
after = []
self._known_passes[name] = (opt_pass, before, after) | [
"def",
"add_pass",
"(",
"self",
",",
"name",
",",
"opt_pass",
",",
"before",
"=",
"None",
",",
"after",
"=",
"None",
")",
":",
"if",
"before",
"is",
"None",
":",
"before",
"=",
"[",
"]",
"if",
"after",
"is",
"None",
":",
"after",
"=",
"[",
"]",
... | Add an optimization pass to the optimizer.
Optimization passes have a name that allows them
to be enabled or disabled by name. By default all
optimization passed are enabled and unordered. You can
explicitly specify passes by name that this pass must run
before or after this passs so that they can be properly
ordered.
Args:
name (str): The name of the optimization pass to allow for
enabling/disabling it by name
opt_pass (OptimizationPass): The optimization pass class itself
before (list(str)): A list of the passes that this pass should
run before.
after (list(str)): A list of the passes that this pass should
run after. | [
"Add",
"an",
"optimization",
"pass",
"to",
"the",
"optimizer",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/optimizer/optimizer.py#L24-L49 |
23,625 | iotile/coretools | iotilesensorgraph/iotile/sg/optimizer/optimizer.py | SensorGraphOptimizer._order_pases | def _order_pases(self, passes):
"""Topologically sort optimization passes.
This ensures that the resulting passes are run in order
respecting before/after constraints.
Args:
passes (iterable): An iterable of pass names that should
be included in the optimization passes run.
"""
passes = set(passes)
pass_deps = {}
for opt in passes:
_, before, after = self._known_passes[opt]
if opt not in pass_deps:
pass_deps[opt] = set()
for after_pass in after:
pass_deps[opt].add(after_pass)
# For passes that we are before, we may need to
# preemptively add them to the list early
for other in before:
if other not in passes:
continue
if other not in pass_deps:
pass_deps[other] = set()
pass_deps[other].add(opt)
return toposort_flatten(pass_deps) | python | def _order_pases(self, passes):
passes = set(passes)
pass_deps = {}
for opt in passes:
_, before, after = self._known_passes[opt]
if opt not in pass_deps:
pass_deps[opt] = set()
for after_pass in after:
pass_deps[opt].add(after_pass)
# For passes that we are before, we may need to
# preemptively add them to the list early
for other in before:
if other not in passes:
continue
if other not in pass_deps:
pass_deps[other] = set()
pass_deps[other].add(opt)
return toposort_flatten(pass_deps) | [
"def",
"_order_pases",
"(",
"self",
",",
"passes",
")",
":",
"passes",
"=",
"set",
"(",
"passes",
")",
"pass_deps",
"=",
"{",
"}",
"for",
"opt",
"in",
"passes",
":",
"_",
",",
"before",
",",
"after",
"=",
"self",
".",
"_known_passes",
"[",
"opt",
"... | Topologically sort optimization passes.
This ensures that the resulting passes are run in order
respecting before/after constraints.
Args:
passes (iterable): An iterable of pass names that should
be included in the optimization passes run. | [
"Topologically",
"sort",
"optimization",
"passes",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/optimizer/optimizer.py#L51-L86 |
23,626 | iotile/coretools | iotilesensorgraph/iotile/sg/optimizer/optimizer.py | SensorGraphOptimizer.optimize | def optimize(self, sensor_graph, model):
"""Optimize a sensor graph by running optimization passes.
The passes are run one at a time and modify the sensor graph
for future passes.
Args:
sensor_graph (SensorGraph): The graph to be optimized
model (DeviceModel): The device that we are optimizing
for, that OptimizationPass objects are free to use
to guide their optimizations.
"""
passes = self._order_pases(self._known_passes.keys())
for opt_name in passes:
rerun = True
pass_instance = self._known_passes[opt_name][0]()
while rerun:
rerun = pass_instance.run(sensor_graph, model=model) | python | def optimize(self, sensor_graph, model):
passes = self._order_pases(self._known_passes.keys())
for opt_name in passes:
rerun = True
pass_instance = self._known_passes[opt_name][0]()
while rerun:
rerun = pass_instance.run(sensor_graph, model=model) | [
"def",
"optimize",
"(",
"self",
",",
"sensor_graph",
",",
"model",
")",
":",
"passes",
"=",
"self",
".",
"_order_pases",
"(",
"self",
".",
"_known_passes",
".",
"keys",
"(",
")",
")",
"for",
"opt_name",
"in",
"passes",
":",
"rerun",
"=",
"True",
"pass_... | Optimize a sensor graph by running optimization passes.
The passes are run one at a time and modify the sensor graph
for future passes.
Args:
sensor_graph (SensorGraph): The graph to be optimized
model (DeviceModel): The device that we are optimizing
for, that OptimizationPass objects are free to use
to guide their optimizations. | [
"Optimize",
"a",
"sensor",
"graph",
"by",
"running",
"optimization",
"passes",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/optimizer/optimizer.py#L88-L108 |
23,627 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/SConscript.py | get_calling_namespaces | def get_calling_namespaces():
"""Return the locals and globals for the function that called
into this module in the current call stack."""
try: 1//0
except ZeroDivisionError:
# Don't start iterating with the current stack-frame to
# prevent creating reference cycles (f_back is safe).
frame = sys.exc_info()[2].tb_frame.f_back
# Find the first frame that *isn't* from this file. This means
# that we expect all of the SCons frames that implement an Export()
# or SConscript() call to be in this file, so that we can identify
# the first non-Script.SConscript frame as the user's local calling
# environment, and the locals and globals dictionaries from that
# frame as the calling namespaces. See the comment below preceding
# the DefaultEnvironmentCall block for even more explanation.
while frame.f_globals.get("__name__") == __name__:
frame = frame.f_back
return frame.f_locals, frame.f_globals | python | def get_calling_namespaces():
try: 1//0
except ZeroDivisionError:
# Don't start iterating with the current stack-frame to
# prevent creating reference cycles (f_back is safe).
frame = sys.exc_info()[2].tb_frame.f_back
# Find the first frame that *isn't* from this file. This means
# that we expect all of the SCons frames that implement an Export()
# or SConscript() call to be in this file, so that we can identify
# the first non-Script.SConscript frame as the user's local calling
# environment, and the locals and globals dictionaries from that
# frame as the calling namespaces. See the comment below preceding
# the DefaultEnvironmentCall block for even more explanation.
while frame.f_globals.get("__name__") == __name__:
frame = frame.f_back
return frame.f_locals, frame.f_globals | [
"def",
"get_calling_namespaces",
"(",
")",
":",
"try",
":",
"1",
"//",
"0",
"except",
"ZeroDivisionError",
":",
"# Don't start iterating with the current stack-frame to",
"# prevent creating reference cycles (f_back is safe).",
"frame",
"=",
"sys",
".",
"exc_info",
"(",
")"... | Return the locals and globals for the function that called
into this module in the current call stack. | [
"Return",
"the",
"locals",
"and",
"globals",
"for",
"the",
"function",
"that",
"called",
"into",
"this",
"module",
"in",
"the",
"current",
"call",
"stack",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/SConscript.py#L70-L89 |
23,628 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/SConscript.py | annotate | def annotate(node):
"""Annotate a node with the stack frame describing the
SConscript file and line number that created it."""
tb = sys.exc_info()[2]
while tb and stack_bottom not in tb.tb_frame.f_locals:
tb = tb.tb_next
if not tb:
# We did not find any exec of an SConscript file: what?!
raise SCons.Errors.InternalError("could not find SConscript stack frame")
node.creator = traceback.extract_stack(tb)[0] | python | def annotate(node):
tb = sys.exc_info()[2]
while tb and stack_bottom not in tb.tb_frame.f_locals:
tb = tb.tb_next
if not tb:
# We did not find any exec of an SConscript file: what?!
raise SCons.Errors.InternalError("could not find SConscript stack frame")
node.creator = traceback.extract_stack(tb)[0] | [
"def",
"annotate",
"(",
"node",
")",
":",
"tb",
"=",
"sys",
".",
"exc_info",
"(",
")",
"[",
"2",
"]",
"while",
"tb",
"and",
"stack_bottom",
"not",
"in",
"tb",
".",
"tb_frame",
".",
"f_locals",
":",
"tb",
"=",
"tb",
".",
"tb_next",
"if",
"not",
"t... | Annotate a node with the stack frame describing the
SConscript file and line number that created it. | [
"Annotate",
"a",
"node",
"with",
"the",
"stack",
"frame",
"describing",
"the",
"SConscript",
"file",
"and",
"line",
"number",
"that",
"created",
"it",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/SConscript.py#L331-L340 |
23,629 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/SConscript.py | BuildDefaultGlobals | def BuildDefaultGlobals():
"""
Create a dictionary containing all the default globals for
SConstruct and SConscript files.
"""
global GlobalDict
if GlobalDict is None:
GlobalDict = {}
import SCons.Script
d = SCons.Script.__dict__
def not_a_module(m, d=d, mtype=type(SCons.Script)):
return not isinstance(d[m], mtype)
for m in filter(not_a_module, dir(SCons.Script)):
GlobalDict[m] = d[m]
return GlobalDict.copy() | python | def BuildDefaultGlobals():
global GlobalDict
if GlobalDict is None:
GlobalDict = {}
import SCons.Script
d = SCons.Script.__dict__
def not_a_module(m, d=d, mtype=type(SCons.Script)):
return not isinstance(d[m], mtype)
for m in filter(not_a_module, dir(SCons.Script)):
GlobalDict[m] = d[m]
return GlobalDict.copy() | [
"def",
"BuildDefaultGlobals",
"(",
")",
":",
"global",
"GlobalDict",
"if",
"GlobalDict",
"is",
"None",
":",
"GlobalDict",
"=",
"{",
"}",
"import",
"SCons",
".",
"Script",
"d",
"=",
"SCons",
".",
"Script",
".",
"__dict__",
"def",
"not_a_module",
"(",
"m",
... | Create a dictionary containing all the default globals for
SConstruct and SConscript files. | [
"Create",
"a",
"dictionary",
"containing",
"all",
"the",
"default",
"globals",
"for",
"SConstruct",
"and",
"SConscript",
"files",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/SConscript.py#L617-L634 |
23,630 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/SConscript.py | SConsEnvironment._exceeds_version | def _exceeds_version(self, major, minor, v_major, v_minor):
"""Return 1 if 'major' and 'minor' are greater than the version
in 'v_major' and 'v_minor', and 0 otherwise."""
return (major > v_major or (major == v_major and minor > v_minor)) | python | def _exceeds_version(self, major, minor, v_major, v_minor):
return (major > v_major or (major == v_major and minor > v_minor)) | [
"def",
"_exceeds_version",
"(",
"self",
",",
"major",
",",
"minor",
",",
"v_major",
",",
"v_minor",
")",
":",
"return",
"(",
"major",
">",
"v_major",
"or",
"(",
"major",
"==",
"v_major",
"and",
"minor",
">",
"v_minor",
")",
")"
] | Return 1 if 'major' and 'minor' are greater than the version
in 'v_major' and 'v_minor', and 0 otherwise. | [
"Return",
"1",
"if",
"major",
"and",
"minor",
"are",
"greater",
"than",
"the",
"version",
"in",
"v_major",
"and",
"v_minor",
"and",
"0",
"otherwise",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/SConscript.py#L359-L362 |
23,631 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/SConscript.py | SConsEnvironment.EnsureSConsVersion | def EnsureSConsVersion(self, major, minor, revision=0):
"""Exit abnormally if the SCons version is not late enough."""
# split string to avoid replacement during build process
if SCons.__version__ == '__' + 'VERSION__':
SCons.Warnings.warn(SCons.Warnings.DevelopmentVersionWarning,
"EnsureSConsVersion is ignored for development version")
return
scons_ver = self._get_major_minor_revision(SCons.__version__)
if scons_ver < (major, minor, revision):
if revision:
scons_ver_string = '%d.%d.%d' % (major, minor, revision)
else:
scons_ver_string = '%d.%d' % (major, minor)
print("SCons %s or greater required, but you have SCons %s" % \
(scons_ver_string, SCons.__version__))
sys.exit(2) | python | def EnsureSConsVersion(self, major, minor, revision=0):
# split string to avoid replacement during build process
if SCons.__version__ == '__' + 'VERSION__':
SCons.Warnings.warn(SCons.Warnings.DevelopmentVersionWarning,
"EnsureSConsVersion is ignored for development version")
return
scons_ver = self._get_major_minor_revision(SCons.__version__)
if scons_ver < (major, minor, revision):
if revision:
scons_ver_string = '%d.%d.%d' % (major, minor, revision)
else:
scons_ver_string = '%d.%d' % (major, minor)
print("SCons %s or greater required, but you have SCons %s" % \
(scons_ver_string, SCons.__version__))
sys.exit(2) | [
"def",
"EnsureSConsVersion",
"(",
"self",
",",
"major",
",",
"minor",
",",
"revision",
"=",
"0",
")",
":",
"# split string to avoid replacement during build process",
"if",
"SCons",
".",
"__version__",
"==",
"'__'",
"+",
"'VERSION__'",
":",
"SCons",
".",
"Warnings... | Exit abnormally if the SCons version is not late enough. | [
"Exit",
"abnormally",
"if",
"the",
"SCons",
"version",
"is",
"not",
"late",
"enough",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/SConscript.py#L462-L477 |
23,632 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/SConscript.py | SConsEnvironment.EnsurePythonVersion | def EnsurePythonVersion(self, major, minor):
"""Exit abnormally if the Python version is not late enough."""
if sys.version_info < (major, minor):
v = sys.version.split()[0]
print("Python %d.%d or greater required, but you have Python %s" %(major,minor,v))
sys.exit(2) | python | def EnsurePythonVersion(self, major, minor):
if sys.version_info < (major, minor):
v = sys.version.split()[0]
print("Python %d.%d or greater required, but you have Python %s" %(major,minor,v))
sys.exit(2) | [
"def",
"EnsurePythonVersion",
"(",
"self",
",",
"major",
",",
"minor",
")",
":",
"if",
"sys",
".",
"version_info",
"<",
"(",
"major",
",",
"minor",
")",
":",
"v",
"=",
"sys",
".",
"version",
".",
"split",
"(",
")",
"[",
"0",
"]",
"print",
"(",
"\... | Exit abnormally if the Python version is not late enough. | [
"Exit",
"abnormally",
"if",
"the",
"Python",
"version",
"is",
"not",
"late",
"enough",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Script/SConscript.py#L479-L484 |
23,633 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msvc.py | validate_vars | def validate_vars(env):
"""Validate the PCH and PCHSTOP construction variables."""
if 'PCH' in env and env['PCH']:
if 'PCHSTOP' not in env:
raise SCons.Errors.UserError("The PCHSTOP construction must be defined if PCH is defined.")
if not SCons.Util.is_String(env['PCHSTOP']):
raise SCons.Errors.UserError("The PCHSTOP construction variable must be a string: %r"%env['PCHSTOP']) | python | def validate_vars(env):
if 'PCH' in env and env['PCH']:
if 'PCHSTOP' not in env:
raise SCons.Errors.UserError("The PCHSTOP construction must be defined if PCH is defined.")
if not SCons.Util.is_String(env['PCHSTOP']):
raise SCons.Errors.UserError("The PCHSTOP construction variable must be a string: %r"%env['PCHSTOP']) | [
"def",
"validate_vars",
"(",
"env",
")",
":",
"if",
"'PCH'",
"in",
"env",
"and",
"env",
"[",
"'PCH'",
"]",
":",
"if",
"'PCHSTOP'",
"not",
"in",
"env",
":",
"raise",
"SCons",
".",
"Errors",
".",
"UserError",
"(",
"\"The PCHSTOP construction must be defined if... | Validate the PCH and PCHSTOP construction variables. | [
"Validate",
"the",
"PCH",
"and",
"PCHSTOP",
"construction",
"variables",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msvc.py#L55-L61 |
23,634 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msvc.py | msvc_set_PCHPDBFLAGS | def msvc_set_PCHPDBFLAGS(env):
"""
Set appropriate PCHPDBFLAGS for the MSVC version being used.
"""
if env.get('MSVC_VERSION',False):
maj, min = msvc_version_to_maj_min(env['MSVC_VERSION'])
if maj < 8:
env['PCHPDBFLAGS'] = SCons.Util.CLVar(['${(PDB and "/Yd") or ""}'])
else:
env['PCHPDBFLAGS'] = ''
else:
# Default if we can't determine which version of MSVC we're using
env['PCHPDBFLAGS'] = SCons.Util.CLVar(['${(PDB and "/Yd") or ""}']) | python | def msvc_set_PCHPDBFLAGS(env):
if env.get('MSVC_VERSION',False):
maj, min = msvc_version_to_maj_min(env['MSVC_VERSION'])
if maj < 8:
env['PCHPDBFLAGS'] = SCons.Util.CLVar(['${(PDB and "/Yd") or ""}'])
else:
env['PCHPDBFLAGS'] = ''
else:
# Default if we can't determine which version of MSVC we're using
env['PCHPDBFLAGS'] = SCons.Util.CLVar(['${(PDB and "/Yd") or ""}']) | [
"def",
"msvc_set_PCHPDBFLAGS",
"(",
"env",
")",
":",
"if",
"env",
".",
"get",
"(",
"'MSVC_VERSION'",
",",
"False",
")",
":",
"maj",
",",
"min",
"=",
"msvc_version_to_maj_min",
"(",
"env",
"[",
"'MSVC_VERSION'",
"]",
")",
"if",
"maj",
"<",
"8",
":",
"en... | Set appropriate PCHPDBFLAGS for the MSVC version being used. | [
"Set",
"appropriate",
"PCHPDBFLAGS",
"for",
"the",
"MSVC",
"version",
"being",
"used",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msvc.py#L63-L75 |
23,635 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msvc.py | pch_emitter | def pch_emitter(target, source, env):
"""Adds the object file target."""
validate_vars(env)
pch = None
obj = None
for t in target:
if SCons.Util.splitext(str(t))[1] == '.pch':
pch = t
if SCons.Util.splitext(str(t))[1] == '.obj':
obj = t
if not obj:
obj = SCons.Util.splitext(str(pch))[0]+'.obj'
target = [pch, obj] # pch must be first, and obj second for the PCHCOM to work
return (target, source) | python | def pch_emitter(target, source, env):
validate_vars(env)
pch = None
obj = None
for t in target:
if SCons.Util.splitext(str(t))[1] == '.pch':
pch = t
if SCons.Util.splitext(str(t))[1] == '.obj':
obj = t
if not obj:
obj = SCons.Util.splitext(str(pch))[0]+'.obj'
target = [pch, obj] # pch must be first, and obj second for the PCHCOM to work
return (target, source) | [
"def",
"pch_emitter",
"(",
"target",
",",
"source",
",",
"env",
")",
":",
"validate_vars",
"(",
"env",
")",
"pch",
"=",
"None",
"obj",
"=",
"None",
"for",
"t",
"in",
"target",
":",
"if",
"SCons",
".",
"Util",
".",
"splitext",
"(",
"str",
"(",
"t",
... | Adds the object file target. | [
"Adds",
"the",
"object",
"file",
"target",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msvc.py#L78-L97 |
23,636 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msvc.py | object_emitter | def object_emitter(target, source, env, parent_emitter):
"""Sets up the PCH dependencies for an object file."""
validate_vars(env)
parent_emitter(target, source, env)
# Add a dependency, but only if the target (e.g. 'Source1.obj')
# doesn't correspond to the pre-compiled header ('Source1.pch').
# If the basenames match, then this was most likely caused by
# someone adding the source file to both the env.PCH() and the
# env.Program() calls, and adding the explicit dependency would
# cause a cycle on the .pch file itself.
#
# See issue #2505 for a discussion of what to do if it turns
# out this assumption causes trouble in the wild:
# http://scons.tigris.org/issues/show_bug.cgi?id=2505
if 'PCH' in env:
pch = env['PCH']
if str(target[0]) != SCons.Util.splitext(str(pch))[0] + '.obj':
env.Depends(target, pch)
return (target, source) | python | def object_emitter(target, source, env, parent_emitter):
validate_vars(env)
parent_emitter(target, source, env)
# Add a dependency, but only if the target (e.g. 'Source1.obj')
# doesn't correspond to the pre-compiled header ('Source1.pch').
# If the basenames match, then this was most likely caused by
# someone adding the source file to both the env.PCH() and the
# env.Program() calls, and adding the explicit dependency would
# cause a cycle on the .pch file itself.
#
# See issue #2505 for a discussion of what to do if it turns
# out this assumption causes trouble in the wild:
# http://scons.tigris.org/issues/show_bug.cgi?id=2505
if 'PCH' in env:
pch = env['PCH']
if str(target[0]) != SCons.Util.splitext(str(pch))[0] + '.obj':
env.Depends(target, pch)
return (target, source) | [
"def",
"object_emitter",
"(",
"target",
",",
"source",
",",
"env",
",",
"parent_emitter",
")",
":",
"validate_vars",
"(",
"env",
")",
"parent_emitter",
"(",
"target",
",",
"source",
",",
"env",
")",
"# Add a dependency, but only if the target (e.g. 'Source1.obj')",
... | Sets up the PCH dependencies for an object file. | [
"Sets",
"up",
"the",
"PCH",
"dependencies",
"for",
"an",
"object",
"file",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msvc.py#L99-L121 |
23,637 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msvc.py | msvc_batch_key | def msvc_batch_key(action, env, target, source):
"""
Returns a key to identify unique batches of sources for compilation.
If batching is enabled (via the $MSVC_BATCH setting), then all
target+source pairs that use the same action, defined by the same
environment, and have the same target and source directories, will
be batched.
Returning None specifies that the specified target+source should not
be batched with other compilations.
"""
# Fixing MSVC_BATCH mode. Previous if did not work when MSVC_BATCH
# was set to False. This new version should work better.
# Note we need to do the env.subst so $MSVC_BATCH can be a reference to
# another construction variable, which is why we test for False and 0
# as strings.
if not 'MSVC_BATCH' in env or env.subst('$MSVC_BATCH') in ('0', 'False', '', None):
# We're not using batching; return no key.
return None
t = target[0]
s = source[0]
if os.path.splitext(t.name)[0] != os.path.splitext(s.name)[0]:
# The base names are different, so this *must* be compiled
# separately; return no key.
return None
return (id(action), id(env), t.dir, s.dir) | python | def msvc_batch_key(action, env, target, source):
# Fixing MSVC_BATCH mode. Previous if did not work when MSVC_BATCH
# was set to False. This new version should work better.
# Note we need to do the env.subst so $MSVC_BATCH can be a reference to
# another construction variable, which is why we test for False and 0
# as strings.
if not 'MSVC_BATCH' in env or env.subst('$MSVC_BATCH') in ('0', 'False', '', None):
# We're not using batching; return no key.
return None
t = target[0]
s = source[0]
if os.path.splitext(t.name)[0] != os.path.splitext(s.name)[0]:
# The base names are different, so this *must* be compiled
# separately; return no key.
return None
return (id(action), id(env), t.dir, s.dir) | [
"def",
"msvc_batch_key",
"(",
"action",
",",
"env",
",",
"target",
",",
"source",
")",
":",
"# Fixing MSVC_BATCH mode. Previous if did not work when MSVC_BATCH",
"# was set to False. This new version should work better.",
"# Note we need to do the env.subst so $MSVC_BATCH can be a refere... | Returns a key to identify unique batches of sources for compilation.
If batching is enabled (via the $MSVC_BATCH setting), then all
target+source pairs that use the same action, defined by the same
environment, and have the same target and source directories, will
be batched.
Returning None specifies that the specified target+source should not
be batched with other compilations. | [
"Returns",
"a",
"key",
"to",
"identify",
"unique",
"batches",
"of",
"sources",
"for",
"compilation",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msvc.py#L146-L173 |
23,638 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msvc.py | generate | def generate(env):
"""Add Builders and construction variables for MSVC++ to an Environment."""
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
# TODO(batch): shouldn't reach in to cmdgen this way; necessary
# for now to bypass the checks in Builder.DictCmdGenerator.__call__()
# and allow .cc and .cpp to be compiled in the same command line.
static_obj.cmdgen.source_ext_match = False
shared_obj.cmdgen.source_ext_match = False
for suffix in CSuffixes:
static_obj.add_action(suffix, CAction)
shared_obj.add_action(suffix, ShCAction)
static_obj.add_emitter(suffix, static_object_emitter)
shared_obj.add_emitter(suffix, shared_object_emitter)
for suffix in CXXSuffixes:
static_obj.add_action(suffix, CXXAction)
shared_obj.add_action(suffix, ShCXXAction)
static_obj.add_emitter(suffix, static_object_emitter)
shared_obj.add_emitter(suffix, shared_object_emitter)
env['CCPDBFLAGS'] = SCons.Util.CLVar(['${(PDB and "/Z7") or ""}'])
env['CCPCHFLAGS'] = SCons.Util.CLVar(['${(PCH and "/Yu%s \\\"/Fp%s\\\""%(PCHSTOP or "",File(PCH))) or ""}'])
env['_MSVC_OUTPUT_FLAG'] = msvc_output_flag
env['_CCCOMCOM'] = '$CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS $CCPCHFLAGS $CCPDBFLAGS'
env['CC'] = 'cl'
env['CCFLAGS'] = SCons.Util.CLVar('/nologo')
env['CFLAGS'] = SCons.Util.CLVar('')
env['CCCOM'] = '${TEMPFILE("$CC $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $CFLAGS $CCFLAGS $_CCCOMCOM","$CCCOMSTR")}'
env['SHCC'] = '$CC'
env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS')
env['SHCFLAGS'] = SCons.Util.CLVar('$CFLAGS')
env['SHCCCOM'] = '${TEMPFILE("$SHCC $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $SHCFLAGS $SHCCFLAGS $_CCCOMCOM","$SHCCCOMSTR")}'
env['CXX'] = '$CC'
env['CXXFLAGS'] = SCons.Util.CLVar('$( /TP $)')
env['CXXCOM'] = '${TEMPFILE("$CXX $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $CXXFLAGS $CCFLAGS $_CCCOMCOM","$CXXCOMSTR")}'
env['SHCXX'] = '$CXX'
env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS')
env['SHCXXCOM'] = '${TEMPFILE("$SHCXX $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $SHCXXFLAGS $SHCCFLAGS $_CCCOMCOM","$SHCXXCOMSTR")}'
env['CPPDEFPREFIX'] = '/D'
env['CPPDEFSUFFIX'] = ''
env['INCPREFIX'] = '/I'
env['INCSUFFIX'] = ''
# env.Append(OBJEMITTER = [static_object_emitter])
# env.Append(SHOBJEMITTER = [shared_object_emitter])
env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1
env['RC'] = 'rc'
env['RCFLAGS'] = SCons.Util.CLVar('')
env['RCSUFFIXES']=['.rc','.rc2']
env['RCCOM'] = '$RC $_CPPDEFFLAGS $_CPPINCFLAGS $RCFLAGS /fo$TARGET $SOURCES'
env['BUILDERS']['RES'] = res_builder
env['OBJPREFIX'] = ''
env['OBJSUFFIX'] = '.obj'
env['SHOBJPREFIX'] = '$OBJPREFIX'
env['SHOBJSUFFIX'] = '$OBJSUFFIX'
# Set-up ms tools paths
msvc_setup_env_once(env)
env['CFILESUFFIX'] = '.c'
env['CXXFILESUFFIX'] = '.cc'
msvc_set_PCHPDBFLAGS(env)
env['PCHCOM'] = '$CXX /Fo${TARGETS[1]} $CXXFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c $SOURCES /Yc$PCHSTOP /Fp${TARGETS[0]} $CCPDBFLAGS $PCHPDBFLAGS'
env['BUILDERS']['PCH'] = pch_builder
if 'ENV' not in env:
env['ENV'] = {}
if 'SystemRoot' not in env['ENV']: # required for dlls in the winsxs folders
env['ENV']['SystemRoot'] = SCons.Platform.win32.get_system_root() | python | def generate(env):
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
# TODO(batch): shouldn't reach in to cmdgen this way; necessary
# for now to bypass the checks in Builder.DictCmdGenerator.__call__()
# and allow .cc and .cpp to be compiled in the same command line.
static_obj.cmdgen.source_ext_match = False
shared_obj.cmdgen.source_ext_match = False
for suffix in CSuffixes:
static_obj.add_action(suffix, CAction)
shared_obj.add_action(suffix, ShCAction)
static_obj.add_emitter(suffix, static_object_emitter)
shared_obj.add_emitter(suffix, shared_object_emitter)
for suffix in CXXSuffixes:
static_obj.add_action(suffix, CXXAction)
shared_obj.add_action(suffix, ShCXXAction)
static_obj.add_emitter(suffix, static_object_emitter)
shared_obj.add_emitter(suffix, shared_object_emitter)
env['CCPDBFLAGS'] = SCons.Util.CLVar(['${(PDB and "/Z7") or ""}'])
env['CCPCHFLAGS'] = SCons.Util.CLVar(['${(PCH and "/Yu%s \\\"/Fp%s\\\""%(PCHSTOP or "",File(PCH))) or ""}'])
env['_MSVC_OUTPUT_FLAG'] = msvc_output_flag
env['_CCCOMCOM'] = '$CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS $CCPCHFLAGS $CCPDBFLAGS'
env['CC'] = 'cl'
env['CCFLAGS'] = SCons.Util.CLVar('/nologo')
env['CFLAGS'] = SCons.Util.CLVar('')
env['CCCOM'] = '${TEMPFILE("$CC $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $CFLAGS $CCFLAGS $_CCCOMCOM","$CCCOMSTR")}'
env['SHCC'] = '$CC'
env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS')
env['SHCFLAGS'] = SCons.Util.CLVar('$CFLAGS')
env['SHCCCOM'] = '${TEMPFILE("$SHCC $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $SHCFLAGS $SHCCFLAGS $_CCCOMCOM","$SHCCCOMSTR")}'
env['CXX'] = '$CC'
env['CXXFLAGS'] = SCons.Util.CLVar('$( /TP $)')
env['CXXCOM'] = '${TEMPFILE("$CXX $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $CXXFLAGS $CCFLAGS $_CCCOMCOM","$CXXCOMSTR")}'
env['SHCXX'] = '$CXX'
env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS')
env['SHCXXCOM'] = '${TEMPFILE("$SHCXX $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $SHCXXFLAGS $SHCCFLAGS $_CCCOMCOM","$SHCXXCOMSTR")}'
env['CPPDEFPREFIX'] = '/D'
env['CPPDEFSUFFIX'] = ''
env['INCPREFIX'] = '/I'
env['INCSUFFIX'] = ''
# env.Append(OBJEMITTER = [static_object_emitter])
# env.Append(SHOBJEMITTER = [shared_object_emitter])
env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1
env['RC'] = 'rc'
env['RCFLAGS'] = SCons.Util.CLVar('')
env['RCSUFFIXES']=['.rc','.rc2']
env['RCCOM'] = '$RC $_CPPDEFFLAGS $_CPPINCFLAGS $RCFLAGS /fo$TARGET $SOURCES'
env['BUILDERS']['RES'] = res_builder
env['OBJPREFIX'] = ''
env['OBJSUFFIX'] = '.obj'
env['SHOBJPREFIX'] = '$OBJPREFIX'
env['SHOBJSUFFIX'] = '$OBJSUFFIX'
# Set-up ms tools paths
msvc_setup_env_once(env)
env['CFILESUFFIX'] = '.c'
env['CXXFILESUFFIX'] = '.cc'
msvc_set_PCHPDBFLAGS(env)
env['PCHCOM'] = '$CXX /Fo${TARGETS[1]} $CXXFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c $SOURCES /Yc$PCHSTOP /Fp${TARGETS[0]} $CCPDBFLAGS $PCHPDBFLAGS'
env['BUILDERS']['PCH'] = pch_builder
if 'ENV' not in env:
env['ENV'] = {}
if 'SystemRoot' not in env['ENV']: # required for dlls in the winsxs folders
env['ENV']['SystemRoot'] = SCons.Platform.win32.get_system_root() | [
"def",
"generate",
"(",
"env",
")",
":",
"static_obj",
",",
"shared_obj",
"=",
"SCons",
".",
"Tool",
".",
"createObjBuilders",
"(",
"env",
")",
"# TODO(batch): shouldn't reach in to cmdgen this way; necessary",
"# for now to bypass the checks in Builder.DictCmdGenerator.__call... | Add Builders and construction variables for MSVC++ to an Environment. | [
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"MSVC",
"++",
"to",
"an",
"Environment",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msvc.py#L213-L286 |
23,639 | iotile/coretools | iotileship/iotile/ship/resources/hardware_manager.py | HardwareManagerResource.open | def open(self):
"""Open and potentially connect to a device."""
self.hwman = HardwareManager(port=self._port)
self.opened = True
if self._connection_string is not None:
try:
self.hwman.connect_direct(self._connection_string)
except HardwareError:
self.hwman.close()
raise
elif self._connect_id is not None:
try:
self.hwman.connect(self._connect_id)
except HardwareError:
self.hwman.close()
raise | python | def open(self):
self.hwman = HardwareManager(port=self._port)
self.opened = True
if self._connection_string is not None:
try:
self.hwman.connect_direct(self._connection_string)
except HardwareError:
self.hwman.close()
raise
elif self._connect_id is not None:
try:
self.hwman.connect(self._connect_id)
except HardwareError:
self.hwman.close()
raise | [
"def",
"open",
"(",
"self",
")",
":",
"self",
".",
"hwman",
"=",
"HardwareManager",
"(",
"port",
"=",
"self",
".",
"_port",
")",
"self",
".",
"opened",
"=",
"True",
"if",
"self",
".",
"_connection_string",
"is",
"not",
"None",
":",
"try",
":",
"self"... | Open and potentially connect to a device. | [
"Open",
"and",
"potentially",
"connect",
"to",
"a",
"device",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileship/iotile/ship/resources/hardware_manager.py#L46-L64 |
23,640 | iotile/coretools | iotileship/iotile/ship/resources/hardware_manager.py | HardwareManagerResource.close | def close(self):
"""Close and potentially disconnect from a device."""
if self.hwman.stream.connected:
self.hwman.disconnect()
self.hwman.close()
self.opened = False | python | def close(self):
if self.hwman.stream.connected:
self.hwman.disconnect()
self.hwman.close()
self.opened = False | [
"def",
"close",
"(",
"self",
")",
":",
"if",
"self",
".",
"hwman",
".",
"stream",
".",
"connected",
":",
"self",
".",
"hwman",
".",
"disconnect",
"(",
")",
"self",
".",
"hwman",
".",
"close",
"(",
")",
"self",
".",
"opened",
"=",
"False"
] | Close and potentially disconnect from a device. | [
"Close",
"and",
"potentially",
"disconnect",
"from",
"a",
"device",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileship/iotile/ship/resources/hardware_manager.py#L67-L74 |
23,641 | iotile/coretools | iotilebuild/iotile/build/config/site_scons/pythondist.py | get_support_package | def get_support_package(tile):
"""Returns the support_package product."""
packages = tile.find_products('support_package')
if len(packages) == 0:
return None
elif len(packages) == 1:
return packages[0]
raise BuildError("Tile declared multiple support packages, only one is supported", packages=packages) | python | def get_support_package(tile):
packages = tile.find_products('support_package')
if len(packages) == 0:
return None
elif len(packages) == 1:
return packages[0]
raise BuildError("Tile declared multiple support packages, only one is supported", packages=packages) | [
"def",
"get_support_package",
"(",
"tile",
")",
":",
"packages",
"=",
"tile",
".",
"find_products",
"(",
"'support_package'",
")",
"if",
"len",
"(",
"packages",
")",
"==",
"0",
":",
"return",
"None",
"elif",
"len",
"(",
"packages",
")",
"==",
"1",
":",
... | Returns the support_package product. | [
"Returns",
"the",
"support_package",
"product",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/site_scons/pythondist.py#L30-L39 |
23,642 | iotile/coretools | iotilebuild/iotile/build/config/site_scons/pythondist.py | iter_support_files | def iter_support_files(tile):
"""Iterate over all files that go in the support wheel.
This method has two possible behaviors. If there is a 'support_package'
product defined, then this recursively enumerates all .py files inside
that folder and adds them all in the same hierarchy to the support wheel.
If there is no support_package product defined, then the old behavior
takes over, where all files containing python entrypoints are iterated
over using iter_python_modules() and they are copied into the support
wheel and then an __init__.py file is added.
The files are yielded as tuples of (copy_name, input_path).
"""
support_package = get_support_package(tile)
if support_package is None:
for module, _, _ in iter_python_modules(tile):
yield os.path.basename(module), module
else:
for dirpath, _dirnames, filenames in os.walk(support_package):
for filename in filenames:
if not filename.endswith('.py'):
continue
input_path = os.path.join(dirpath, filename)
output_path = os.path.relpath(input_path, start=support_package)
if output_path == "__init__.py":
continue
yield output_path, input_path | python | def iter_support_files(tile):
support_package = get_support_package(tile)
if support_package is None:
for module, _, _ in iter_python_modules(tile):
yield os.path.basename(module), module
else:
for dirpath, _dirnames, filenames in os.walk(support_package):
for filename in filenames:
if not filename.endswith('.py'):
continue
input_path = os.path.join(dirpath, filename)
output_path = os.path.relpath(input_path, start=support_package)
if output_path == "__init__.py":
continue
yield output_path, input_path | [
"def",
"iter_support_files",
"(",
"tile",
")",
":",
"support_package",
"=",
"get_support_package",
"(",
"tile",
")",
"if",
"support_package",
"is",
"None",
":",
"for",
"module",
",",
"_",
",",
"_",
"in",
"iter_python_modules",
"(",
"tile",
")",
":",
"yield",... | Iterate over all files that go in the support wheel.
This method has two possible behaviors. If there is a 'support_package'
product defined, then this recursively enumerates all .py files inside
that folder and adds them all in the same hierarchy to the support wheel.
If there is no support_package product defined, then the old behavior
takes over, where all files containing python entrypoints are iterated
over using iter_python_modules() and they are copied into the support
wheel and then an __init__.py file is added.
The files are yielded as tuples of (copy_name, input_path). | [
"Iterate",
"over",
"all",
"files",
"that",
"go",
"in",
"the",
"support",
"wheel",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/site_scons/pythondist.py#L42-L73 |
23,643 | iotile/coretools | iotilebuild/iotile/build/config/site_scons/pythondist.py | iter_python_modules | def iter_python_modules(tile):
"""Iterate over all python products in the given tile.
This will yield tuples where the first entry is the path to the module
containing the product the second entry is the appropriate
import string to include in an entry point, and the third entry is
the entry point name.
"""
for product_type in tile.PYTHON_PRODUCTS:
for product in tile.find_products(product_type):
entry_point = ENTRY_POINT_MAP.get(product_type)
if entry_point is None:
raise BuildError("Found an unknown python product (%s) whose entrypoint could not be determined (%s)" %
(product_type, product))
if ':' in product:
module, _, obj_name = product.rpartition(':')
else:
module = product
obj_name = None
if not os.path.exists(module):
raise BuildError("Found a python product whose path did not exist: %s" % module)
product_name = os.path.basename(module)
if product_name.endswith(".py"):
product_name = product_name[:-3]
import_string = "{} = {}.{}".format(product_name, tile.support_distribution, product_name)
if obj_name is not None:
import_string += ":{}".format(obj_name)
yield (module, import_string, entry_point) | python | def iter_python_modules(tile):
for product_type in tile.PYTHON_PRODUCTS:
for product in tile.find_products(product_type):
entry_point = ENTRY_POINT_MAP.get(product_type)
if entry_point is None:
raise BuildError("Found an unknown python product (%s) whose entrypoint could not be determined (%s)" %
(product_type, product))
if ':' in product:
module, _, obj_name = product.rpartition(':')
else:
module = product
obj_name = None
if not os.path.exists(module):
raise BuildError("Found a python product whose path did not exist: %s" % module)
product_name = os.path.basename(module)
if product_name.endswith(".py"):
product_name = product_name[:-3]
import_string = "{} = {}.{}".format(product_name, tile.support_distribution, product_name)
if obj_name is not None:
import_string += ":{}".format(obj_name)
yield (module, import_string, entry_point) | [
"def",
"iter_python_modules",
"(",
"tile",
")",
":",
"for",
"product_type",
"in",
"tile",
".",
"PYTHON_PRODUCTS",
":",
"for",
"product",
"in",
"tile",
".",
"find_products",
"(",
"product_type",
")",
":",
"entry_point",
"=",
"ENTRY_POINT_MAP",
".",
"get",
"(",
... | Iterate over all python products in the given tile.
This will yield tuples where the first entry is the path to the module
containing the product the second entry is the appropriate
import string to include in an entry point, and the third entry is
the entry point name. | [
"Iterate",
"over",
"all",
"python",
"products",
"in",
"the",
"given",
"tile",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/site_scons/pythondist.py#L76-L110 |
23,644 | iotile/coretools | iotilebuild/iotile/build/config/site_scons/pythondist.py | generate_setup_py | def generate_setup_py(target, source, env):
"""Generate the setup.py file for this distribution."""
tile = env['TILE']
data = {}
entry_points = {}
for _mod, import_string, entry_point in iter_python_modules(tile):
if entry_point not in entry_points:
entry_points[entry_point] = []
entry_points[entry_point].append(import_string)
data['name'] = tile.support_distribution
data['package'] = tile.support_distribution
data['version'] = tile.parsed_version.pep440_string()
data['deps'] = ["{0} {1}".format(x.support_distribution, x.parsed_version.pep440_compatibility_specifier())
for x in _iter_dependencies(tile) if x.has_wheel]
# If there are some python packages needed, we add them to the list of dependencies required
if tile.support_wheel_depends:
data['deps'] += tile.support_wheel_depends
data['entry_points'] = entry_points
outdir = os.path.dirname(str(target[0]))
render_template('setup.py.tpl', data, out_path=str(target[0]))
# Run setuptools to generate a wheel and an sdist
curr = os.getcwd()
os.chdir(outdir)
try:
setuptools.sandbox.run_setup('setup.py', ['-q', 'clean', 'sdist'])
if "python_universal" in tile.settings:
setuptools.sandbox.run_setup('setup.py', ['-q', 'clean', 'bdist_wheel', '--universal'])
else:
setuptools.sandbox.run_setup('setup.py', ['-q', 'clean', 'bdist_wheel'])
finally:
os.chdir(curr) | python | def generate_setup_py(target, source, env):
tile = env['TILE']
data = {}
entry_points = {}
for _mod, import_string, entry_point in iter_python_modules(tile):
if entry_point not in entry_points:
entry_points[entry_point] = []
entry_points[entry_point].append(import_string)
data['name'] = tile.support_distribution
data['package'] = tile.support_distribution
data['version'] = tile.parsed_version.pep440_string()
data['deps'] = ["{0} {1}".format(x.support_distribution, x.parsed_version.pep440_compatibility_specifier())
for x in _iter_dependencies(tile) if x.has_wheel]
# If there are some python packages needed, we add them to the list of dependencies required
if tile.support_wheel_depends:
data['deps'] += tile.support_wheel_depends
data['entry_points'] = entry_points
outdir = os.path.dirname(str(target[0]))
render_template('setup.py.tpl', data, out_path=str(target[0]))
# Run setuptools to generate a wheel and an sdist
curr = os.getcwd()
os.chdir(outdir)
try:
setuptools.sandbox.run_setup('setup.py', ['-q', 'clean', 'sdist'])
if "python_universal" in tile.settings:
setuptools.sandbox.run_setup('setup.py', ['-q', 'clean', 'bdist_wheel', '--universal'])
else:
setuptools.sandbox.run_setup('setup.py', ['-q', 'clean', 'bdist_wheel'])
finally:
os.chdir(curr) | [
"def",
"generate_setup_py",
"(",
"target",
",",
"source",
",",
"env",
")",
":",
"tile",
"=",
"env",
"[",
"'TILE'",
"]",
"data",
"=",
"{",
"}",
"entry_points",
"=",
"{",
"}",
"for",
"_mod",
",",
"import_string",
",",
"entry_point",
"in",
"iter_python_modu... | Generate the setup.py file for this distribution. | [
"Generate",
"the",
"setup",
".",
"py",
"file",
"for",
"this",
"distribution",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/site_scons/pythondist.py#L197-L237 |
23,645 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/rpmutils.py | defaultMachine | def defaultMachine(use_rpm_default=True):
""" Return the canonicalized machine name. """
if use_rpm_default:
try:
# This should be the most reliable way to get the default arch
rmachine = subprocess.check_output(['rpm', '--eval=%_target_cpu'], shell=False).rstrip()
rmachine = SCons.Util.to_str(rmachine)
except Exception as e:
# Something went wrong, try again by looking up platform.machine()
return defaultMachine(False)
else:
rmachine = platform.machine()
# Try to lookup the string in the canon table
if rmachine in arch_canon:
rmachine = arch_canon[rmachine][0]
return rmachine | python | def defaultMachine(use_rpm_default=True):
if use_rpm_default:
try:
# This should be the most reliable way to get the default arch
rmachine = subprocess.check_output(['rpm', '--eval=%_target_cpu'], shell=False).rstrip()
rmachine = SCons.Util.to_str(rmachine)
except Exception as e:
# Something went wrong, try again by looking up platform.machine()
return defaultMachine(False)
else:
rmachine = platform.machine()
# Try to lookup the string in the canon table
if rmachine in arch_canon:
rmachine = arch_canon[rmachine][0]
return rmachine | [
"def",
"defaultMachine",
"(",
"use_rpm_default",
"=",
"True",
")",
":",
"if",
"use_rpm_default",
":",
"try",
":",
"# This should be the most reliable way to get the default arch",
"rmachine",
"=",
"subprocess",
".",
"check_output",
"(",
"[",
"'rpm'",
",",
"'--eval=%_tar... | Return the canonicalized machine name. | [
"Return",
"the",
"canonicalized",
"machine",
"name",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/rpmutils.py#L441-L459 |
23,646 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/rpmutils.py | defaultSystem | def defaultSystem():
""" Return the canonicalized system name. """
rsystem = platform.system()
# Try to lookup the string in the canon tables
if rsystem in os_canon:
rsystem = os_canon[rsystem][0]
return rsystem | python | def defaultSystem():
rsystem = platform.system()
# Try to lookup the string in the canon tables
if rsystem in os_canon:
rsystem = os_canon[rsystem][0]
return rsystem | [
"def",
"defaultSystem",
"(",
")",
":",
"rsystem",
"=",
"platform",
".",
"system",
"(",
")",
"# Try to lookup the string in the canon tables",
"if",
"rsystem",
"in",
"os_canon",
":",
"rsystem",
"=",
"os_canon",
"[",
"rsystem",
"]",
"[",
"0",
"]",
"return",
"rsy... | Return the canonicalized system name. | [
"Return",
"the",
"canonicalized",
"system",
"name",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/rpmutils.py#L461-L469 |
23,647 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py | Task.prepare | def prepare(self):
"""
Called just before the task is executed.
This is mainly intended to give the target Nodes a chance to
unlink underlying files and make all necessary directories before
the Action is actually called to build the targets.
"""
global print_prepare
T = self.tm.trace
if T: T.write(self.trace_message(u'Task.prepare()', self.node))
# Now that it's the appropriate time, give the TaskMaster a
# chance to raise any exceptions it encountered while preparing
# this task.
self.exception_raise()
if self.tm.message:
self.display(self.tm.message)
self.tm.message = None
# Let the targets take care of any necessary preparations.
# This includes verifying that all of the necessary sources
# and dependencies exist, removing the target file(s), etc.
#
# As of April 2008, the get_executor().prepare() method makes
# sure that all of the aggregate sources necessary to build this
# Task's target(s) exist in one up-front check. The individual
# target t.prepare() methods check that each target's explicit
# or implicit dependencies exists, and also initialize the
# .sconsign info.
executor = self.targets[0].get_executor()
if executor is None:
return
executor.prepare()
for t in executor.get_action_targets():
if print_prepare:
print("Preparing target %s..."%t)
for s in t.side_effects:
print("...with side-effect %s..."%s)
t.prepare()
for s in t.side_effects:
if print_prepare:
print("...Preparing side-effect %s..."%s)
s.prepare() | python | def prepare(self):
global print_prepare
T = self.tm.trace
if T: T.write(self.trace_message(u'Task.prepare()', self.node))
# Now that it's the appropriate time, give the TaskMaster a
# chance to raise any exceptions it encountered while preparing
# this task.
self.exception_raise()
if self.tm.message:
self.display(self.tm.message)
self.tm.message = None
# Let the targets take care of any necessary preparations.
# This includes verifying that all of the necessary sources
# and dependencies exist, removing the target file(s), etc.
#
# As of April 2008, the get_executor().prepare() method makes
# sure that all of the aggregate sources necessary to build this
# Task's target(s) exist in one up-front check. The individual
# target t.prepare() methods check that each target's explicit
# or implicit dependencies exists, and also initialize the
# .sconsign info.
executor = self.targets[0].get_executor()
if executor is None:
return
executor.prepare()
for t in executor.get_action_targets():
if print_prepare:
print("Preparing target %s..."%t)
for s in t.side_effects:
print("...with side-effect %s..."%s)
t.prepare()
for s in t.side_effects:
if print_prepare:
print("...Preparing side-effect %s..."%s)
s.prepare() | [
"def",
"prepare",
"(",
"self",
")",
":",
"global",
"print_prepare",
"T",
"=",
"self",
".",
"tm",
".",
"trace",
"if",
"T",
":",
"T",
".",
"write",
"(",
"self",
".",
"trace_message",
"(",
"u'Task.prepare()'",
",",
"self",
".",
"node",
")",
")",
"# Now ... | Called just before the task is executed.
This is mainly intended to give the target Nodes a chance to
unlink underlying files and make all necessary directories before
the Action is actually called to build the targets. | [
"Called",
"just",
"before",
"the",
"task",
"is",
"executed",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L163-L207 |
23,648 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py | Task.execute | def execute(self):
"""
Called to execute the task.
This method is called from multiple threads in a parallel build,
so only do thread safe stuff here. Do thread unsafe stuff in
prepare(), executed() or failed().
"""
T = self.tm.trace
if T: T.write(self.trace_message(u'Task.execute()', self.node))
try:
cached_targets = []
for t in self.targets:
if not t.retrieve_from_cache():
break
cached_targets.append(t)
if len(cached_targets) < len(self.targets):
# Remove targets before building. It's possible that we
# partially retrieved targets from the cache, leaving
# them in read-only mode. That might cause the command
# to fail.
#
for t in cached_targets:
try:
t.fs.unlink(t.get_internal_path())
except (IOError, OSError):
pass
self.targets[0].build()
else:
for t in cached_targets:
t.cached = 1
except SystemExit:
exc_value = sys.exc_info()[1]
raise SCons.Errors.ExplicitExit(self.targets[0], exc_value.code)
except SCons.Errors.UserError:
raise
except SCons.Errors.BuildError:
raise
except Exception as e:
buildError = SCons.Errors.convert_to_BuildError(e)
buildError.node = self.targets[0]
buildError.exc_info = sys.exc_info()
raise buildError | python | def execute(self):
T = self.tm.trace
if T: T.write(self.trace_message(u'Task.execute()', self.node))
try:
cached_targets = []
for t in self.targets:
if not t.retrieve_from_cache():
break
cached_targets.append(t)
if len(cached_targets) < len(self.targets):
# Remove targets before building. It's possible that we
# partially retrieved targets from the cache, leaving
# them in read-only mode. That might cause the command
# to fail.
#
for t in cached_targets:
try:
t.fs.unlink(t.get_internal_path())
except (IOError, OSError):
pass
self.targets[0].build()
else:
for t in cached_targets:
t.cached = 1
except SystemExit:
exc_value = sys.exc_info()[1]
raise SCons.Errors.ExplicitExit(self.targets[0], exc_value.code)
except SCons.Errors.UserError:
raise
except SCons.Errors.BuildError:
raise
except Exception as e:
buildError = SCons.Errors.convert_to_BuildError(e)
buildError.node = self.targets[0]
buildError.exc_info = sys.exc_info()
raise buildError | [
"def",
"execute",
"(",
"self",
")",
":",
"T",
"=",
"self",
".",
"tm",
".",
"trace",
"if",
"T",
":",
"T",
".",
"write",
"(",
"self",
".",
"trace_message",
"(",
"u'Task.execute()'",
",",
"self",
".",
"node",
")",
")",
"try",
":",
"cached_targets",
"=... | Called to execute the task.
This method is called from multiple threads in a parallel build,
so only do thread safe stuff here. Do thread unsafe stuff in
prepare(), executed() or failed(). | [
"Called",
"to",
"execute",
"the",
"task",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L226-L269 |
23,649 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py | Task.executed_without_callbacks | def executed_without_callbacks(self):
"""
Called when the task has been successfully executed
and the Taskmaster instance doesn't want to call
the Node's callback methods.
"""
T = self.tm.trace
if T: T.write(self.trace_message('Task.executed_without_callbacks()',
self.node))
for t in self.targets:
if t.get_state() == NODE_EXECUTING:
for side_effect in t.side_effects:
side_effect.set_state(NODE_NO_STATE)
t.set_state(NODE_EXECUTED) | python | def executed_without_callbacks(self):
T = self.tm.trace
if T: T.write(self.trace_message('Task.executed_without_callbacks()',
self.node))
for t in self.targets:
if t.get_state() == NODE_EXECUTING:
for side_effect in t.side_effects:
side_effect.set_state(NODE_NO_STATE)
t.set_state(NODE_EXECUTED) | [
"def",
"executed_without_callbacks",
"(",
"self",
")",
":",
"T",
"=",
"self",
".",
"tm",
".",
"trace",
"if",
"T",
":",
"T",
".",
"write",
"(",
"self",
".",
"trace_message",
"(",
"'Task.executed_without_callbacks()'",
",",
"self",
".",
"node",
")",
")",
"... | Called when the task has been successfully executed
and the Taskmaster instance doesn't want to call
the Node's callback methods. | [
"Called",
"when",
"the",
"task",
"has",
"been",
"successfully",
"executed",
"and",
"the",
"Taskmaster",
"instance",
"doesn",
"t",
"want",
"to",
"call",
"the",
"Node",
"s",
"callback",
"methods",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L271-L285 |
23,650 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py | Task.executed_with_callbacks | def executed_with_callbacks(self):
"""
Called when the task has been successfully executed and
the Taskmaster instance wants to call the Node's callback
methods.
This may have been a do-nothing operation (to preserve build
order), so we must check the node's state before deciding whether
it was "built", in which case we call the appropriate Node method.
In any event, we always call "visited()", which will handle any
post-visit actions that must take place regardless of whether
or not the target was an actual built target or a source Node.
"""
global print_prepare
T = self.tm.trace
if T: T.write(self.trace_message('Task.executed_with_callbacks()',
self.node))
for t in self.targets:
if t.get_state() == NODE_EXECUTING:
for side_effect in t.side_effects:
side_effect.set_state(NODE_NO_STATE)
t.set_state(NODE_EXECUTED)
if not t.cached:
t.push_to_cache()
t.built()
t.visited()
if (not print_prepare and
(not hasattr(self, 'options') or not self.options.debug_includes)):
t.release_target_info()
else:
t.visited() | python | def executed_with_callbacks(self):
global print_prepare
T = self.tm.trace
if T: T.write(self.trace_message('Task.executed_with_callbacks()',
self.node))
for t in self.targets:
if t.get_state() == NODE_EXECUTING:
for side_effect in t.side_effects:
side_effect.set_state(NODE_NO_STATE)
t.set_state(NODE_EXECUTED)
if not t.cached:
t.push_to_cache()
t.built()
t.visited()
if (not print_prepare and
(not hasattr(self, 'options') or not self.options.debug_includes)):
t.release_target_info()
else:
t.visited() | [
"def",
"executed_with_callbacks",
"(",
"self",
")",
":",
"global",
"print_prepare",
"T",
"=",
"self",
".",
"tm",
".",
"trace",
"if",
"T",
":",
"T",
".",
"write",
"(",
"self",
".",
"trace_message",
"(",
"'Task.executed_with_callbacks()'",
",",
"self",
".",
... | Called when the task has been successfully executed and
the Taskmaster instance wants to call the Node's callback
methods.
This may have been a do-nothing operation (to preserve build
order), so we must check the node's state before deciding whether
it was "built", in which case we call the appropriate Node method.
In any event, we always call "visited()", which will handle any
post-visit actions that must take place regardless of whether
or not the target was an actual built target or a source Node. | [
"Called",
"when",
"the",
"task",
"has",
"been",
"successfully",
"executed",
"and",
"the",
"Taskmaster",
"instance",
"wants",
"to",
"call",
"the",
"Node",
"s",
"callback",
"methods",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L287-L318 |
23,651 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py | Task.fail_stop | def fail_stop(self):
"""
Explicit stop-the-build failure.
This sets failure status on the target nodes and all of
their dependent parent nodes.
Note: Although this function is normally invoked on nodes in
the executing state, it might also be invoked on up-to-date
nodes when using Configure().
"""
T = self.tm.trace
if T: T.write(self.trace_message('Task.failed_stop()', self.node))
# Invoke will_not_build() to clean-up the pending children
# list.
self.tm.will_not_build(self.targets, lambda n: n.set_state(NODE_FAILED))
# Tell the taskmaster to not start any new tasks
self.tm.stop()
# We're stopping because of a build failure, but give the
# calling Task class a chance to postprocess() the top-level
# target under which the build failure occurred.
self.targets = [self.tm.current_top]
self.top = 1 | python | def fail_stop(self):
T = self.tm.trace
if T: T.write(self.trace_message('Task.failed_stop()', self.node))
# Invoke will_not_build() to clean-up the pending children
# list.
self.tm.will_not_build(self.targets, lambda n: n.set_state(NODE_FAILED))
# Tell the taskmaster to not start any new tasks
self.tm.stop()
# We're stopping because of a build failure, but give the
# calling Task class a chance to postprocess() the top-level
# target under which the build failure occurred.
self.targets = [self.tm.current_top]
self.top = 1 | [
"def",
"fail_stop",
"(",
"self",
")",
":",
"T",
"=",
"self",
".",
"tm",
".",
"trace",
"if",
"T",
":",
"T",
".",
"write",
"(",
"self",
".",
"trace_message",
"(",
"'Task.failed_stop()'",
",",
"self",
".",
"node",
")",
")",
"# Invoke will_not_build() to cle... | Explicit stop-the-build failure.
This sets failure status on the target nodes and all of
their dependent parent nodes.
Note: Although this function is normally invoked on nodes in
the executing state, it might also be invoked on up-to-date
nodes when using Configure(). | [
"Explicit",
"stop",
"-",
"the",
"-",
"build",
"failure",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L332-L357 |
23,652 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py | Task.fail_continue | def fail_continue(self):
"""
Explicit continue-the-build failure.
This sets failure status on the target nodes and all of
their dependent parent nodes.
Note: Although this function is normally invoked on nodes in
the executing state, it might also be invoked on up-to-date
nodes when using Configure().
"""
T = self.tm.trace
if T: T.write(self.trace_message('Task.failed_continue()', self.node))
self.tm.will_not_build(self.targets, lambda n: n.set_state(NODE_FAILED)) | python | def fail_continue(self):
T = self.tm.trace
if T: T.write(self.trace_message('Task.failed_continue()', self.node))
self.tm.will_not_build(self.targets, lambda n: n.set_state(NODE_FAILED)) | [
"def",
"fail_continue",
"(",
"self",
")",
":",
"T",
"=",
"self",
".",
"tm",
".",
"trace",
"if",
"T",
":",
"T",
".",
"write",
"(",
"self",
".",
"trace_message",
"(",
"'Task.failed_continue()'",
",",
"self",
".",
"node",
")",
")",
"self",
".",
"tm",
... | Explicit continue-the-build failure.
This sets failure status on the target nodes and all of
their dependent parent nodes.
Note: Although this function is normally invoked on nodes in
the executing state, it might also be invoked on up-to-date
nodes when using Configure(). | [
"Explicit",
"continue",
"-",
"the",
"-",
"build",
"failure",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L359-L373 |
23,653 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py | Task.make_ready_all | def make_ready_all(self):
"""
Marks all targets in a task ready for execution.
This is used when the interface needs every target Node to be
visited--the canonical example being the "scons -c" option.
"""
T = self.tm.trace
if T: T.write(self.trace_message('Task.make_ready_all()', self.node))
self.out_of_date = self.targets[:]
for t in self.targets:
t.disambiguate().set_state(NODE_EXECUTING)
for s in t.side_effects:
# add disambiguate here to mirror the call on targets above
s.disambiguate().set_state(NODE_EXECUTING) | python | def make_ready_all(self):
T = self.tm.trace
if T: T.write(self.trace_message('Task.make_ready_all()', self.node))
self.out_of_date = self.targets[:]
for t in self.targets:
t.disambiguate().set_state(NODE_EXECUTING)
for s in t.side_effects:
# add disambiguate here to mirror the call on targets above
s.disambiguate().set_state(NODE_EXECUTING) | [
"def",
"make_ready_all",
"(",
"self",
")",
":",
"T",
"=",
"self",
".",
"tm",
".",
"trace",
"if",
"T",
":",
"T",
".",
"write",
"(",
"self",
".",
"trace_message",
"(",
"'Task.make_ready_all()'",
",",
"self",
".",
"node",
")",
")",
"self",
".",
"out_of_... | Marks all targets in a task ready for execution.
This is used when the interface needs every target Node to be
visited--the canonical example being the "scons -c" option. | [
"Marks",
"all",
"targets",
"in",
"a",
"task",
"ready",
"for",
"execution",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L375-L390 |
23,654 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py | Task.make_ready_current | def make_ready_current(self):
"""
Marks all targets in a task ready for execution if any target
is not current.
This is the default behavior for building only what's necessary.
"""
global print_prepare
T = self.tm.trace
if T: T.write(self.trace_message(u'Task.make_ready_current()',
self.node))
self.out_of_date = []
needs_executing = False
for t in self.targets:
try:
t.disambiguate().make_ready()
is_up_to_date = not t.has_builder() or \
(not t.always_build and t.is_up_to_date())
except EnvironmentError as e:
raise SCons.Errors.BuildError(node=t, errstr=e.strerror, filename=e.filename)
if not is_up_to_date:
self.out_of_date.append(t)
needs_executing = True
if needs_executing:
for t in self.targets:
t.set_state(NODE_EXECUTING)
for s in t.side_effects:
# add disambiguate here to mirror the call on targets in first loop above
s.disambiguate().set_state(NODE_EXECUTING)
else:
for t in self.targets:
# We must invoke visited() to ensure that the node
# information has been computed before allowing the
# parent nodes to execute. (That could occur in a
# parallel build...)
t.visited()
t.set_state(NODE_UP_TO_DATE)
if (not print_prepare and
(not hasattr(self, 'options') or not self.options.debug_includes)):
t.release_target_info() | python | def make_ready_current(self):
global print_prepare
T = self.tm.trace
if T: T.write(self.trace_message(u'Task.make_ready_current()',
self.node))
self.out_of_date = []
needs_executing = False
for t in self.targets:
try:
t.disambiguate().make_ready()
is_up_to_date = not t.has_builder() or \
(not t.always_build and t.is_up_to_date())
except EnvironmentError as e:
raise SCons.Errors.BuildError(node=t, errstr=e.strerror, filename=e.filename)
if not is_up_to_date:
self.out_of_date.append(t)
needs_executing = True
if needs_executing:
for t in self.targets:
t.set_state(NODE_EXECUTING)
for s in t.side_effects:
# add disambiguate here to mirror the call on targets in first loop above
s.disambiguate().set_state(NODE_EXECUTING)
else:
for t in self.targets:
# We must invoke visited() to ensure that the node
# information has been computed before allowing the
# parent nodes to execute. (That could occur in a
# parallel build...)
t.visited()
t.set_state(NODE_UP_TO_DATE)
if (not print_prepare and
(not hasattr(self, 'options') or not self.options.debug_includes)):
t.release_target_info() | [
"def",
"make_ready_current",
"(",
"self",
")",
":",
"global",
"print_prepare",
"T",
"=",
"self",
".",
"tm",
".",
"trace",
"if",
"T",
":",
"T",
".",
"write",
"(",
"self",
".",
"trace_message",
"(",
"u'Task.make_ready_current()'",
",",
"self",
".",
"node",
... | Marks all targets in a task ready for execution if any target
is not current.
This is the default behavior for building only what's necessary. | [
"Marks",
"all",
"targets",
"in",
"a",
"task",
"ready",
"for",
"execution",
"if",
"any",
"target",
"is",
"not",
"current",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L392-L434 |
23,655 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py | Task.postprocess | def postprocess(self):
"""
Post-processes a task after it's been executed.
This examines all the targets just built (or not, we don't care
if the build was successful, or even if there was no build
because everything was up-to-date) to see if they have any
waiting parent Nodes, or Nodes waiting on a common side effect,
that can be put back on the candidates list.
"""
T = self.tm.trace
if T: T.write(self.trace_message(u'Task.postprocess()', self.node))
# We may have built multiple targets, some of which may have
# common parents waiting for this build. Count up how many
# targets each parent was waiting for so we can subtract the
# values later, and so we *don't* put waiting side-effect Nodes
# back on the candidates list if the Node is also a waiting
# parent.
targets = set(self.targets)
pending_children = self.tm.pending_children
parents = {}
for t in targets:
# A node can only be in the pending_children set if it has
# some waiting_parents.
if t.waiting_parents:
if T: T.write(self.trace_message(u'Task.postprocess()',
t,
'removing'))
pending_children.discard(t)
for p in t.waiting_parents:
parents[p] = parents.get(p, 0) + 1
for t in targets:
if t.side_effects is not None:
for s in t.side_effects:
if s.get_state() == NODE_EXECUTING:
s.set_state(NODE_NO_STATE)
for p in s.waiting_parents:
parents[p] = parents.get(p, 0) + 1
for p in s.waiting_s_e:
if p.ref_count == 0:
self.tm.candidates.append(p)
for p, subtract in parents.items():
p.ref_count = p.ref_count - subtract
if T: T.write(self.trace_message(u'Task.postprocess()',
p,
'adjusted parent ref count'))
if p.ref_count == 0:
self.tm.candidates.append(p)
for t in targets:
t.postprocess() | python | def postprocess(self):
T = self.tm.trace
if T: T.write(self.trace_message(u'Task.postprocess()', self.node))
# We may have built multiple targets, some of which may have
# common parents waiting for this build. Count up how many
# targets each parent was waiting for so we can subtract the
# values later, and so we *don't* put waiting side-effect Nodes
# back on the candidates list if the Node is also a waiting
# parent.
targets = set(self.targets)
pending_children = self.tm.pending_children
parents = {}
for t in targets:
# A node can only be in the pending_children set if it has
# some waiting_parents.
if t.waiting_parents:
if T: T.write(self.trace_message(u'Task.postprocess()',
t,
'removing'))
pending_children.discard(t)
for p in t.waiting_parents:
parents[p] = parents.get(p, 0) + 1
for t in targets:
if t.side_effects is not None:
for s in t.side_effects:
if s.get_state() == NODE_EXECUTING:
s.set_state(NODE_NO_STATE)
for p in s.waiting_parents:
parents[p] = parents.get(p, 0) + 1
for p in s.waiting_s_e:
if p.ref_count == 0:
self.tm.candidates.append(p)
for p, subtract in parents.items():
p.ref_count = p.ref_count - subtract
if T: T.write(self.trace_message(u'Task.postprocess()',
p,
'adjusted parent ref count'))
if p.ref_count == 0:
self.tm.candidates.append(p)
for t in targets:
t.postprocess() | [
"def",
"postprocess",
"(",
"self",
")",
":",
"T",
"=",
"self",
".",
"tm",
".",
"trace",
"if",
"T",
":",
"T",
".",
"write",
"(",
"self",
".",
"trace_message",
"(",
"u'Task.postprocess()'",
",",
"self",
".",
"node",
")",
")",
"# We may have built multiple ... | Post-processes a task after it's been executed.
This examines all the targets just built (or not, we don't care
if the build was successful, or even if there was no build
because everything was up-to-date) to see if they have any
waiting parent Nodes, or Nodes waiting on a common side effect,
that can be put back on the candidates list. | [
"Post",
"-",
"processes",
"a",
"task",
"after",
"it",
"s",
"been",
"executed",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L438-L493 |
23,656 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py | Task.exception_set | def exception_set(self, exception=None):
"""
Records an exception to be raised at the appropriate time.
This also changes the "exception_raise" attribute to point
to the method that will, in fact
"""
if not exception:
exception = sys.exc_info()
self.exception = exception
self.exception_raise = self._exception_raise | python | def exception_set(self, exception=None):
if not exception:
exception = sys.exc_info()
self.exception = exception
self.exception_raise = self._exception_raise | [
"def",
"exception_set",
"(",
"self",
",",
"exception",
"=",
"None",
")",
":",
"if",
"not",
"exception",
":",
"exception",
"=",
"sys",
".",
"exc_info",
"(",
")",
"self",
".",
"exception",
"=",
"exception",
"self",
".",
"exception_raise",
"=",
"self",
".",... | Records an exception to be raised at the appropriate time.
This also changes the "exception_raise" attribute to point
to the method that will, in fact | [
"Records",
"an",
"exception",
"to",
"be",
"raised",
"at",
"the",
"appropriate",
"time",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L520-L530 |
23,657 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py | Task._exception_raise | def _exception_raise(self):
"""
Raises a pending exception that was recorded while getting a
Task ready for execution.
"""
exc = self.exc_info()[:]
try:
exc_type, exc_value, exc_traceback = exc
except ValueError:
exc_type, exc_value = exc
exc_traceback = None
# raise exc_type(exc_value).with_traceback(exc_traceback)
if sys.version_info[0] == 2:
exec("raise exc_type, exc_value, exc_traceback")
else: # sys.version_info[0] == 3:
if isinstance(exc_value, Exception): #hasattr(exc_value, 'with_traceback'):
# If exc_value is an exception, then just reraise
exec("raise exc_value.with_traceback(exc_traceback)")
else:
# else we'll create an exception using the value and raise that
exec("raise exc_type(exc_value).with_traceback(exc_traceback)") | python | def _exception_raise(self):
exc = self.exc_info()[:]
try:
exc_type, exc_value, exc_traceback = exc
except ValueError:
exc_type, exc_value = exc
exc_traceback = None
# raise exc_type(exc_value).with_traceback(exc_traceback)
if sys.version_info[0] == 2:
exec("raise exc_type, exc_value, exc_traceback")
else: # sys.version_info[0] == 3:
if isinstance(exc_value, Exception): #hasattr(exc_value, 'with_traceback'):
# If exc_value is an exception, then just reraise
exec("raise exc_value.with_traceback(exc_traceback)")
else:
# else we'll create an exception using the value and raise that
exec("raise exc_type(exc_value).with_traceback(exc_traceback)") | [
"def",
"_exception_raise",
"(",
"self",
")",
":",
"exc",
"=",
"self",
".",
"exc_info",
"(",
")",
"[",
":",
"]",
"try",
":",
"exc_type",
",",
"exc_value",
",",
"exc_traceback",
"=",
"exc",
"except",
"ValueError",
":",
"exc_type",
",",
"exc_value",
"=",
... | Raises a pending exception that was recorded while getting a
Task ready for execution. | [
"Raises",
"a",
"pending",
"exception",
"that",
"was",
"recorded",
"while",
"getting",
"a",
"Task",
"ready",
"for",
"execution",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L535-L556 |
23,658 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py | Taskmaster.no_next_candidate | def no_next_candidate(self):
"""
Stops Taskmaster processing by not returning a next candidate.
Note that we have to clean-up the Taskmaster candidate list
because the cycle detection depends on the fact all nodes have
been processed somehow.
"""
while self.candidates:
candidates = self.candidates
self.candidates = []
self.will_not_build(candidates)
return None | python | def no_next_candidate(self):
while self.candidates:
candidates = self.candidates
self.candidates = []
self.will_not_build(candidates)
return None | [
"def",
"no_next_candidate",
"(",
"self",
")",
":",
"while",
"self",
".",
"candidates",
":",
"candidates",
"=",
"self",
".",
"candidates",
"self",
".",
"candidates",
"=",
"[",
"]",
"self",
".",
"will_not_build",
"(",
"candidates",
")",
"return",
"None"
] | Stops Taskmaster processing by not returning a next candidate.
Note that we have to clean-up the Taskmaster candidate list
because the cycle detection depends on the fact all nodes have
been processed somehow. | [
"Stops",
"Taskmaster",
"processing",
"by",
"not",
"returning",
"a",
"next",
"candidate",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L660-L672 |
23,659 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py | Taskmaster._validate_pending_children | def _validate_pending_children(self):
"""
Validate the content of the pending_children set. Assert if an
internal error is found.
This function is used strictly for debugging the taskmaster by
checking that no invariants are violated. It is not used in
normal operation.
The pending_children set is used to detect cycles in the
dependency graph. We call a "pending child" a child that is
found in the "pending" state when checking the dependencies of
its parent node.
A pending child can occur when the Taskmaster completes a loop
through a cycle. For example, let's imagine a graph made of
three nodes (A, B and C) making a cycle. The evaluation starts
at node A. The Taskmaster first considers whether node A's
child B is up-to-date. Then, recursively, node B needs to
check whether node C is up-to-date. This leaves us with a
dependency graph looking like::
Next candidate \
\
Node A (Pending) --> Node B(Pending) --> Node C (NoState)
^ |
| |
+-------------------------------------+
Now, when the Taskmaster examines the Node C's child Node A,
it finds that Node A is in the "pending" state. Therefore,
Node A is a pending child of node C.
Pending children indicate that the Taskmaster has potentially
loop back through a cycle. We say potentially because it could
also occur when a DAG is evaluated in parallel. For example,
consider the following graph::
Node A (Pending) --> Node B(Pending) --> Node C (Pending) --> ...
| ^
| |
+----------> Node D (NoState) --------+
/
Next candidate /
The Taskmaster first evaluates the nodes A, B, and C and
starts building some children of node C. Assuming, that the
maximum parallel level has not been reached, the Taskmaster
will examine Node D. It will find that Node C is a pending
child of Node D.
In summary, evaluating a graph with a cycle will always
involve a pending child at one point. A pending child might
indicate either a cycle or a diamond-shaped DAG. Only a
fraction of the nodes ends-up being a "pending child" of
another node. This keeps the pending_children set small in
practice.
We can differentiate between the two cases if we wait until
the end of the build. At this point, all the pending children
nodes due to a diamond-shaped DAG will have been properly
built (or will have failed to build). But, the pending
children involved in a cycle will still be in the pending
state.
The taskmaster removes nodes from the pending_children set as
soon as a pending_children node moves out of the pending
state. This also helps to keep the pending_children set small.
"""
for n in self.pending_children:
assert n.state in (NODE_PENDING, NODE_EXECUTING), \
(str(n), StateString[n.state])
assert len(n.waiting_parents) != 0, (str(n), len(n.waiting_parents))
for p in n.waiting_parents:
assert p.ref_count > 0, (str(n), str(p), p.ref_count) | python | def _validate_pending_children(self):
for n in self.pending_children:
assert n.state in (NODE_PENDING, NODE_EXECUTING), \
(str(n), StateString[n.state])
assert len(n.waiting_parents) != 0, (str(n), len(n.waiting_parents))
for p in n.waiting_parents:
assert p.ref_count > 0, (str(n), str(p), p.ref_count) | [
"def",
"_validate_pending_children",
"(",
"self",
")",
":",
"for",
"n",
"in",
"self",
".",
"pending_children",
":",
"assert",
"n",
".",
"state",
"in",
"(",
"NODE_PENDING",
",",
"NODE_EXECUTING",
")",
",",
"(",
"str",
"(",
"n",
")",
",",
"StateString",
"[... | Validate the content of the pending_children set. Assert if an
internal error is found.
This function is used strictly for debugging the taskmaster by
checking that no invariants are violated. It is not used in
normal operation.
The pending_children set is used to detect cycles in the
dependency graph. We call a "pending child" a child that is
found in the "pending" state when checking the dependencies of
its parent node.
A pending child can occur when the Taskmaster completes a loop
through a cycle. For example, let's imagine a graph made of
three nodes (A, B and C) making a cycle. The evaluation starts
at node A. The Taskmaster first considers whether node A's
child B is up-to-date. Then, recursively, node B needs to
check whether node C is up-to-date. This leaves us with a
dependency graph looking like::
Next candidate \
\
Node A (Pending) --> Node B(Pending) --> Node C (NoState)
^ |
| |
+-------------------------------------+
Now, when the Taskmaster examines the Node C's child Node A,
it finds that Node A is in the "pending" state. Therefore,
Node A is a pending child of node C.
Pending children indicate that the Taskmaster has potentially
loop back through a cycle. We say potentially because it could
also occur when a DAG is evaluated in parallel. For example,
consider the following graph::
Node A (Pending) --> Node B(Pending) --> Node C (Pending) --> ...
| ^
| |
+----------> Node D (NoState) --------+
/
Next candidate /
The Taskmaster first evaluates the nodes A, B, and C and
starts building some children of node C. Assuming, that the
maximum parallel level has not been reached, the Taskmaster
will examine Node D. It will find that Node C is a pending
child of Node D.
In summary, evaluating a graph with a cycle will always
involve a pending child at one point. A pending child might
indicate either a cycle or a diamond-shaped DAG. Only a
fraction of the nodes ends-up being a "pending child" of
another node. This keeps the pending_children set small in
practice.
We can differentiate between the two cases if we wait until
the end of the build. At this point, all the pending children
nodes due to a diamond-shaped DAG will have been properly
built (or will have failed to build). But, the pending
children involved in a cycle will still be in the pending
state.
The taskmaster removes nodes from the pending_children set as
soon as a pending_children node moves out of the pending
state. This also helps to keep the pending_children set small. | [
"Validate",
"the",
"content",
"of",
"the",
"pending_children",
"set",
".",
"Assert",
"if",
"an",
"internal",
"error",
"is",
"found",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L674-L749 |
def next_task(self):
    """Return the next task to be executed, or None if nothing is ready.

    This asks for the next Node to be evaluated and then wraps it in the
    specific Task subclass with which this Taskmaster was initialized.

    Returns:
        Task: A task ready to be executed, or None when no node is ready
            (either nothing remains or nothing has a usable executor).
    """
    node = self._find_next_ready_node()
    if node is None:
        return None

    executor = node.get_executor()
    if executor is None:
        return None

    tlist = executor.get_all_targets()

    task = self.tasker(self, tlist, node in self.original_top, node)
    try:
        task.make_ready()
    # Fix: the original bound the exception to an unused name
    # (`except Exception as e :`); the binding is dropped since only
    # sys.exc_info() is consulted below.
    except Exception:
        # We had a problem just trying to get this task ready (like
        # a child couldn't be linked to a VariantDir when deciding
        # whether this node is current). Arrange to raise the
        # exception when the Task is "executed."
        self.ready_exc = sys.exc_info()

    if self.ready_exc:
        task.exception_set(self.ready_exc)

    self.ready_exc = None

    return task
node = self._find_next_ready_node()
if node is None:
return None
executor = node.get_executor()
if executor is None:
return None
tlist = executor.get_all_targets()
task = self.tasker(self, tlist, node in self.original_top, node)
try:
task.make_ready()
except Exception as e :
# We had a problem just trying to get this task ready (like
# a child couldn't be linked to a VariantDir when deciding
# whether this node is current). Arrange to raise the
# exception when the Task is "executed."
self.ready_exc = sys.exc_info()
if self.ready_exc:
task.exception_set(self.ready_exc)
self.ready_exc = None
return task | [
"def",
"next_task",
"(",
"self",
")",
":",
"node",
"=",
"self",
".",
"_find_next_ready_node",
"(",
")",
"if",
"node",
"is",
"None",
":",
"return",
"None",
"executor",
"=",
"node",
".",
"get_executor",
"(",
")",
"if",
"executor",
"is",
"None",
":",
"ret... | Returns the next task to be executed.
This simply asks for the next Node to be evaluated, and then wraps
it in the specific Task subclass with which we were initialized. | [
"Returns",
"the",
"next",
"task",
"to",
"be",
"executed",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L952-L985 |
def cleanup(self):
    """Check for dependency cycles among still-pending children.

    Raises:
        SCons.Errors.UserError: If any genuine dependency cycle is found.
    """
    if not self.pending_children:
        return

    pairs = [(child, find_cycle([child], set()))
             for child in self.pending_children]
    genuine = [child for child, chain in pairs
               if chain or child.get_state() != NODE_EXECUTED]
    if not genuine:
        # Every "cycle" was a lone node already in EXECUTED state --
        # not actually a cycle -- so there is nothing to report.
        return

    msg = 'Found dependency cycle(s):\n'
    for child, chain in pairs:
        if chain:
            msg = msg + " " + " -> ".join(map(str, chain)) + "\n"
        else:
            msg = msg + \
                " Internal Error: no cycle found for node %s (%s) in state %s\n" % \
                (child, repr(child), StateString[child.get_state()])

    raise SCons.Errors.UserError(msg)
if not self.pending_children:
return
nclist = [(n, find_cycle([n], set())) for n in self.pending_children]
genuine_cycles = [
node for node,cycle in nclist
if cycle or node.get_state() != NODE_EXECUTED
]
if not genuine_cycles:
# All of the "cycles" found were single nodes in EXECUTED state,
# which is to say, they really weren't cycles. Just return.
return
desc = 'Found dependency cycle(s):\n'
for node, cycle in nclist:
if cycle:
desc = desc + " " + " -> ".join(map(str, cycle)) + "\n"
else:
desc = desc + \
" Internal Error: no cycle found for node %s (%s) in state %s\n" % \
(node, repr(node), StateString[node.get_state()])
raise SCons.Errors.UserError(desc) | [
"def",
"cleanup",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"pending_children",
":",
"return",
"nclist",
"=",
"[",
"(",
"n",
",",
"find_cycle",
"(",
"[",
"n",
"]",
",",
"set",
"(",
")",
")",
")",
"for",
"n",
"in",
"self",
".",
"pending_chi... | Check for dependency cycles. | [
"Check",
"for",
"dependency",
"cycles",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Taskmaster.py#L1037-L1064 |
def instantiate_resolver(self, name, args):
    """Directly instantiate a dependency resolver by name.

    Args:
        name (string): The name of the resolver class to instantiate.
        args (dict): The arguments to pass to the resolver factory.

    Returns:
        DependencyResolver: The newly constructed resolver.

    Raises:
        ArgumentError: If no resolver is registered under ``name``.
    """
    try:
        factory = self._known_resolvers[name]
    except KeyError:
        raise ArgumentError("Attempting to instantiate unknown dependency resolver", name=name)

    return factory(args)
if name not in self._known_resolvers:
raise ArgumentError("Attempting to instantiate unknown dependency resolver", name=name)
return self._known_resolvers[name](args) | [
"def",
"instantiate_resolver",
"(",
"self",
",",
"name",
",",
"args",
")",
":",
"if",
"name",
"not",
"in",
"self",
".",
"_known_resolvers",
":",
"raise",
"ArgumentError",
"(",
"\"Attempting to instantiate unknown dependency resolver\"",
",",
"name",
"=",
"name",
"... | Directly instantiate a dependency resolver by name with the given arguments
Args:
name (string): The name of the class that we want to instantiate
args (dict): The arguments to pass to the resolver factory
Returns:
DependencyResolver | [
"Directly",
"instantiate",
"a",
"dependency",
"resolver",
"by",
"name",
"with",
"the",
"given",
"arguments"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/dev/resolverchain.py#L53-L66 |
def pull_release(self, name, version, destfolder=".", force=False):
    """Download and unpack a released iotile component by name and version range.

    If the folder that would be created already exists this command fails,
    unless ``force=True`` is passed.

    Args:
        name (string): The name of the component to download.
        version (SemanticVersionRange): The valid versions of the component
            to fetch.
        destfolder (string): The folder into which to unpack the result,
            defaults to the current working directory.
        force (bool): Forcibly overwrite whatever is currently in the folder
            that would be fetched.

    Raises:
        ExternalError: If the destination folder exists and force was not
            specified.
        ArgumentError: If the specified component could not be found with
            the required version.
    """
    unique_id = name.replace('/', '_')

    dep_info = {
        'name': name,
        'unique_id': unique_id,
        'required_version': version,
        'required_version_string': str(version)
    }

    target_dir = os.path.join(destfolder, unique_id)
    if os.path.exists(target_dir):
        if not force:
            raise ExternalError("Output directory exists and force was not specified, aborting",
                                output_directory=target_dir)

        shutil.rmtree(target_dir)

    outcome = self.update_dependency(None, dep_info, target_dir)
    if outcome != "installed":
        raise ArgumentError("Could not find component to satisfy name/version combination")
unique_id = name.replace('/', '_')
depdict = {
'name': name,
'unique_id': unique_id,
'required_version': version,
'required_version_string': str(version)
}
destdir = os.path.join(destfolder, unique_id)
if os.path.exists(destdir):
if not force:
raise ExternalError("Output directory exists and force was not specified, aborting",
output_directory=destdir)
shutil.rmtree(destdir)
result = self.update_dependency(None, depdict, destdir)
if result != "installed":
raise ArgumentError("Could not find component to satisfy name/version combination") | [
"def",
"pull_release",
"(",
"self",
",",
"name",
",",
"version",
",",
"destfolder",
"=",
"\".\"",
",",
"force",
"=",
"False",
")",
":",
"unique_id",
"=",
"name",
".",
"replace",
"(",
"'/'",
",",
"'_'",
")",
"depdict",
"=",
"{",
"'name'",
":",
"name",... | Download and unpack a released iotile component by name and version range
If the folder that would be created already exists, this command fails unless
you pass force=True
Args:
name (string): The name of the component to download
version (SemanticVersionRange): The valid versions of the component to fetch
destfolder (string): The folder into which to unpack the result, defaults to
the current working directory
force (bool): Forcibly overwrite whatever is currently in the folder that would
be fetched.
Raises:
ExternalError: If the destination folder exists and force is not specified
ArgumentError: If the specified component could not be found with the required version | [
"Download",
"and",
"unpack",
"a",
"released",
"iotile",
"component",
"by",
"name",
"and",
"version",
"range"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/dev/resolverchain.py#L68-L106 |
def update_dependency(self, tile, depinfo, destdir=None):
    """Attempt to install or update a dependency to the latest version.

    Args:
        tile (IOTile): An IOTile object describing the tile that has the
            dependency.
        depinfo (dict): A dictionary from tile.dependencies specifying the
            dependency.
        destdir (string): An optional folder into which to unpack the
            dependency.

    Returns:
        string: A string indicating the outcome.  Possible values are:
            "already installed"
            "installed"
            "updated"
            "not found"
    """
    if destdir is None:
        destdir = os.path.join(tile.folder, 'build', 'deps', depinfo['unique_id'])

    # Track both whether a copy exists on disk right now (has_version)
    # and whether one existed when we started (had_version) so we can
    # report "installed" vs "updated" correctly at the end.
    has_version = os.path.exists(destdir)
    had_version = has_version

    for _priority, rule in self.rules:
        if not self._check_rule(rule, depinfo):
            continue

        resolver = self._find_resolver(rule)

        if has_version:
            deptile = IOTile(destdir)

            # If the installed copy is stale, remove it and fall through
            # to reinstall; otherwise (current, or resolver can't tell)
            # leave it alone and move on to the next rule.
            depstatus = self._check_dep(depinfo, deptile, resolver)
            if depstatus is False:
                shutil.rmtree(destdir)
                has_version = False
            else:
                continue

        # Now try to resolve this dependency with the latest version
        result = resolver.resolve(depinfo, destdir)
        if not result['found'] and result.get('stop', False):
            return 'not found'

        if not result['found']:
            continue

        settings = {
            'resolver': resolver.__class__.__name__,
            'factory_args': rule[2]
        }
        if 'settings' in result:
            settings['settings'] = result['settings']

        self._save_depsettings(destdir, settings)

        return "updated" if had_version else "installed"

    return "already installed" if has_version else "not found"
if destdir is None:
destdir = os.path.join(tile.folder, 'build', 'deps', depinfo['unique_id'])
has_version = False
had_version = False
if os.path.exists(destdir):
has_version = True
had_version = True
for priority, rule in self.rules:
if not self._check_rule(rule, depinfo):
continue
resolver = self._find_resolver(rule)
if has_version:
deptile = IOTile(destdir)
# If the dependency is not up to date, don't do anything
depstatus = self._check_dep(depinfo, deptile, resolver)
if depstatus is False:
shutil.rmtree(destdir)
has_version = False
else:
continue
# Now try to resolve this dependency with the latest version
result = resolver.resolve(depinfo, destdir)
if not result['found'] and result.get('stop', False):
return 'not found'
if not result['found']:
continue
settings = {
'resolver': resolver.__class__.__name__,
'factory_args': rule[2]
}
if 'settings' in result:
settings['settings'] = result['settings']
self._save_depsettings(destdir, settings)
if had_version:
return "updated"
return "installed"
if has_version:
return "already installed"
return "not found" | [
"def",
"update_dependency",
"(",
"self",
",",
"tile",
",",
"depinfo",
",",
"destdir",
"=",
"None",
")",
":",
"if",
"destdir",
"is",
"None",
":",
"destdir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"tile",
".",
"folder",
",",
"'build'",
",",
"'deps'"... | Attempt to install or update a dependency to the latest version.
Args:
tile (IOTile): An IOTile object describing the tile that has the dependency
depinfo (dict): a dictionary from tile.dependencies specifying the dependency
destdir (string): An optional folder into which to unpack the dependency
Returns:
string: a string indicating the outcome. Possible values are:
"already installed"
"installed"
"updated"
"not found" | [
"Attempt",
"to",
"install",
"or",
"update",
"a",
"dependency",
"to",
"the",
"latest",
"version",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/dev/resolverchain.py#L108-L176 |
def _check_dep(self, depinfo, deptile, resolver):
    """Check whether a dependency tile is up to date.

    Returns:
        bool: True if it is up to date, False if it is not, and None if
            this resolver cannot assess whether or not it is up to date.
    """
    try:
        stored = self._load_depsettings(deptile)
    except IOError:
        return False

    # A dependency originally resolved by a different resolver cannot be
    # checked for freshness by this one.
    if stored['resolver'] != resolver.__class__.__name__:
        return None

    resolver_settings = stored.get('settings', {})

    return resolver.check(depinfo, deptile, resolver_settings)
try:
settings = self._load_depsettings(deptile)
except IOError:
return False
# If this dependency was initially resolved with a different resolver, then
# we cannot check if it is up to date
if settings['resolver'] != resolver.__class__.__name__:
return None
resolver_settings = {}
if 'settings' in settings:
resolver_settings = settings['settings']
return resolver.check(depinfo, deptile, resolver_settings) | [
"def",
"_check_dep",
"(",
"self",
",",
"depinfo",
",",
"deptile",
",",
"resolver",
")",
":",
"try",
":",
"settings",
"=",
"self",
".",
"_load_depsettings",
"(",
"deptile",
")",
"except",
"IOError",
":",
"return",
"False",
"# If this dependency was initially reso... | Check if a dependency tile is up to date
Returns:
bool: True if it is up to date, False if it not and None if this resolver
cannot assess whether or not it is up to date. | [
"Check",
"if",
"a",
"dependency",
"tile",
"is",
"up",
"to",
"date"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/dev/resolverchain.py#L192-L214 |
def _log_future_exception(future, logger):
    """Log (at warning level) any exception stored on a completed future.

    Futures that have not finished are ignored; futures that completed
    successfully produce no output.
    """

    if not future.done():
        return

    try:
        future.result()
    except:  #pylint:disable=bare-except;This is a background logging helper
        logger.warning("Exception in ignored future: %s", future, exc_info=True)
if not future.done():
return
try:
future.result()
except: #pylint:disable=bare-except;This is a background logging helper
logger.warning("Exception in ignored future: %s", future, exc_info=True) | [
"def",
"_log_future_exception",
"(",
"future",
",",
"logger",
")",
":",
"if",
"not",
"future",
".",
"done",
"(",
")",
":",
"return",
"try",
":",
"future",
".",
"result",
"(",
")",
"except",
":",
"#pylint:disable=bare-except;This is a background logging helper",
... | Log any exception raised by future. | [
"Log",
"any",
"exception",
"raised",
"by",
"future",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/async_tools/event_loop.py#L647-L656 |
def create_subtask(self, cor, name=None, stop_timeout=1.0):
    """Create a BackgroundTask from a coroutine and register it as a subtask.

    Convenience wrapper that constructs the BackgroundTask and then
    immediately passes it to add_subtask().

    Args:
        cor (coroutine): The coroutine that should be wrapped in a
            background task.
        name (str): An optional name for the task.
        stop_timeout (float): The maximum time to wait for this subtask
            to die after stopping it.

    Returns:
        BackgroundTask: The created subtask.

    Raises:
        InternalError: If this parent task has already been stopped.
    """

    if self.stopped:
        raise InternalError("Cannot add a subtask to a parent that is already stopped")

    child = BackgroundTask(cor, name, loop=self._loop, stop_timeout=stop_timeout)
    self.add_subtask(child)
    return child
if self.stopped:
raise InternalError("Cannot add a subtask to a parent that is already stopped")
subtask = BackgroundTask(cor, name, loop=self._loop, stop_timeout=stop_timeout)
self.add_subtask(subtask)
return subtask | [
"def",
"create_subtask",
"(",
"self",
",",
"cor",
",",
"name",
"=",
"None",
",",
"stop_timeout",
"=",
"1.0",
")",
":",
"if",
"self",
".",
"stopped",
":",
"raise",
"InternalError",
"(",
"\"Cannot add a subtask to a parent that is already stopped\"",
")",
"subtask",... | Create and add a subtask from a coroutine.
This function will create a BackgroundTask and then
call self.add_subtask() on it.
Args:
cor (coroutine): The coroutine that should be wrapped
in a background task.
name (str): An optional name for the task.
stop_timeout (float): The maximum time to wait for this
subtask to die after stopping it.
Returns:
Backgroundtask: The created subtask. | [
"Create",
"and",
"add",
"a",
"subtask",
"from",
"a",
"coroutine",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/async_tools/event_loop.py#L113-L135 |
def add_subtask(self, subtask):
    """Link a subtask to this parent task.

    This causes stop() to block until the subtask has also finished.
    Calling stop will not directly cancel the subtask; it is expected
    that the finalizer for this parent task will cancel or otherwise
    stop the subtask.

    Args:
        subtask (BackgroundTask): Another task that will be stopped when
            this task is stopped.

    Raises:
        InternalError: If this parent task has already been stopped.
        ArgumentError: If ``subtask`` is not a BackgroundTask or does not
            run in the same BackgroundEventLoop as this task.
    """

    if self.stopped:
        raise InternalError("Cannot add a subtask to a parent that is already stopped")

    if not isinstance(subtask, BackgroundTask):
        raise ArgumentError("Subtasks must inherit from BackgroundTask, task={}".format(subtask))

    #pylint:disable=protected-access;It is the same class as us so is equivalent to self access.
    if subtask._loop != self._loop:
        raise ArgumentError("Subtasks must run in the same BackgroundEventLoop as their parent",
                            subtask=subtask, parent=self)

    self.subtasks.append(subtask)
if self.stopped:
raise InternalError("Cannot add a subtask to a parent that is already stopped")
if not isinstance(subtask, BackgroundTask):
raise ArgumentError("Subtasks must inherit from BackgroundTask, task={}".format(subtask))
#pylint:disable=protected-access;It is the same class as us so is equivalent to self access.
if subtask._loop != self._loop:
raise ArgumentError("Subtasks must run in the same BackgroundEventLoop as their parent",
subtask=subtask, parent=self)
self.subtasks.append(subtask) | [
"def",
"add_subtask",
"(",
"self",
",",
"subtask",
")",
":",
"if",
"self",
".",
"stopped",
":",
"raise",
"InternalError",
"(",
"\"Cannot add a subtask to a parent that is already stopped\"",
")",
"if",
"not",
"isinstance",
"(",
"subtask",
",",
"BackgroundTask",
")",... | Link a subtask to this parent task.
This will cause stop() to block until the subtask has also
finished. Calling stop will not directly cancel the subtask.
It is expected that your finalizer for this parent task will
cancel or otherwise stop the subtask.
Args:
subtask (BackgroundTask): Another task that will be stopped
when this task is stopped. | [
"Link",
"a",
"subtask",
"to",
"this",
"parent",
"task",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/async_tools/event_loop.py#L137-L161 |
async def stop(self):
    """Stop this task and wait until it and all of its subtasks end.

    The task is finalized either with the finalizer function given at
    creation time or, if none was given, by calling task.cancel().

    After finalization this coroutine waits on the task and every
    registered subtask for at most the stop timeout given at creation
    time, raising asyncio.TimeoutError if they do not all exit in time.

    This method should only be called once.  After it returns, the task
    is finished and no more subtasks can be added.  If this task is
    tracked inside its BackgroundEventLoop it is removed from the loop's
    task list automatically.
    """

    if self.stopped:
        return

    self._logger.debug("Stopping task %s", self.name)

    if self._finalizer is not None:
        try:
            outcome = self._finalizer(self)
            if inspect.isawaitable(outcome):
                await outcome
        except:  #pylint:disable=bare-except;We need to make sure we always wait for the task
            self._logger.exception("Error running finalizer for task %s",
                                   self.name)
    elif self.task is not None:
        self.task.cancel()

    pending = []
    if self.task is not None:
        pending.append(self.task)

    pending.extend(child.task for child in self.subtasks)

    gathered = asyncio.gather(*pending, return_exceptions=True)

    results = []
    try:
        results = await asyncio.wait_for(gathered, timeout=self._stop_timeout)
    except asyncio.TimeoutError as err:
        # See discussion here: https://github.com/python/asyncio/issues/253#issuecomment-120138132
        # This prevents a nuisance log error message: on a wait_for()
        # timeout the gathered future is guaranteed to be cancelled but
        # not yet awaited, so await it here before propagating.
        try:
            results = await gathered
        except asyncio.CancelledError:
            pass

        # See https://mail.python.org/pipermail/python-3000/2008-May/013740.html
        # for why we need to explictly name the error here
        raise err
    finally:
        self.stopped = True

    for item in results:
        if isinstance(item, Exception) and not isinstance(item, asyncio.CancelledError):
            self._logger.error(item)

    if self in self._loop.tasks:
        self._loop.tasks.remove(self)
if self.stopped:
return
self._logger.debug("Stopping task %s", self.name)
if self._finalizer is not None:
try:
result = self._finalizer(self)
if inspect.isawaitable(result):
await result
except: #pylint:disable=bare-except;We need to make sure we always wait for the task
self._logger.exception("Error running finalizer for task %s",
self.name)
elif self.task is not None:
self.task.cancel()
tasks = []
if self.task is not None:
tasks.append(self.task)
tasks.extend(x.task for x in self.subtasks)
finished = asyncio.gather(*tasks, return_exceptions=True)
outcomes = []
try:
outcomes = await asyncio.wait_for(finished, timeout=self._stop_timeout)
except asyncio.TimeoutError as err:
# See discussion here: https://github.com/python/asyncio/issues/253#issuecomment-120138132
# This prevents a nuisance log error message, finished is guaranteed
# to be cancelled but not awaited when wait_for() has a timeout.
try:
outcomes = await finished
except asyncio.CancelledError:
pass
# See https://mail.python.org/pipermail/python-3000/2008-May/013740.html
# for why we need to explictly name the error here
raise err
finally:
self.stopped = True
for outcome in outcomes:
if isinstance(outcome, Exception) and not isinstance(outcome, asyncio.CancelledError):
self._logger.error(outcome)
if self in self._loop.tasks:
self._loop.tasks.remove(self) | [
"async",
"def",
"stop",
"(",
"self",
")",
":",
"if",
"self",
".",
"stopped",
":",
"return",
"self",
".",
"_logger",
".",
"debug",
"(",
"\"Stopping task %s\"",
",",
"self",
".",
"name",
")",
"if",
"self",
".",
"_finalizer",
"is",
"not",
"None",
":",
"... | Stop this task and wait until it and all its subtasks end.
This function will finalize this task either by using the finalizer
function passed during creation or by calling task.cancel() if no
finalizer was passed.
It will then call join() on this task and any registered subtasks
with the given maximum timeout, raising asyncio.TimeoutError if
the tasks did not exit within the given timeout.
This method should only be called once.
After this method returns, the task is finished and no more subtasks
can be added. If this task is being tracked inside of the
BackgroundEventLoop that it is part of, it will automatically be
removed from the event loop's list of tasks. | [
"Stop",
"this",
"task",
"and",
"wait",
"until",
"it",
"and",
"all",
"its",
"subtasks",
"end",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/async_tools/event_loop.py#L163-L227 |
def stop_threadsafe(self):
    """Stop this task from another thread and wait for it to finish.

    This method must not be called from within the BackgroundEventLoop;
    it injects self.stop() into the event loop and blocks until that
    coroutine returns.

    Raises:
        TimeoutExpiredError: If the task does not stop within the
            timeout given in __init__().
    """

    if self.stopped:
        return

    try:
        self._loop.run_coroutine(self.stop())
    except asyncio.TimeoutError:
        raise TimeoutExpiredError("Timeout stopping task {} with {} subtasks".format(self.name, len(self.subtasks)))
if self.stopped:
return
try:
self._loop.run_coroutine(self.stop())
except asyncio.TimeoutError:
raise TimeoutExpiredError("Timeout stopping task {} with {} subtasks".format(self.name, len(self.subtasks))) | [
"def",
"stop_threadsafe",
"(",
"self",
")",
":",
"if",
"self",
".",
"stopped",
":",
"return",
"try",
":",
"self",
".",
"_loop",
".",
"run_coroutine",
"(",
"self",
".",
"stop",
"(",
")",
")",
"except",
"asyncio",
".",
"TimeoutError",
":",
"raise",
"Time... | Stop this task from another thread and wait for it to finish.
This method must not be called from within the BackgroundEventLoop but
will inject self.stop() into the event loop and block until it
returns.
Raises:
TimeoutExpiredError: If the task does not stop in the given
timeout specified in __init__() | [
"Stop",
"this",
"task",
"from",
"another",
"thread",
"and",
"wait",
"for",
"it",
"to",
"finish",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/async_tools/event_loop.py#L229-L247 |
def start(self, aug='EventLoopThread'):
    """Ensure the background loop is running.

    Safe to call multiple times; if the loop is already running this is
    a no-op.

    Args:
        aug (str): The name to give the background loop thread.

    Raises:
        LoopStoppingError: If the loop is currently shutting down.
    """

    if self.stopping:
        raise LoopStoppingError("Cannot perform action while loop is stopping.")

    if self.loop:
        return

    self._logger.debug("Starting event loop")

    self.loop = asyncio.new_event_loop()
    self.thread = threading.Thread(target=self._loop_thread_main, name=aug, daemon=True)
    self.thread.start()
if self.stopping:
raise LoopStoppingError("Cannot perform action while loop is stopping.")
if not self.loop:
self._logger.debug("Starting event loop")
self.loop = asyncio.new_event_loop()
self.thread = threading.Thread(target=self._loop_thread_main, name=aug, daemon=True)
self.thread.start() | [
"def",
"start",
"(",
"self",
",",
"aug",
"=",
"'EventLoopThread'",
")",
":",
"if",
"self",
".",
"stopping",
":",
"raise",
"LoopStoppingError",
"(",
"\"Cannot perform action while loop is stopping.\"",
")",
"if",
"not",
"self",
".",
"loop",
":",
"self",
".",
"_... | Ensure the background loop is running.
This method is safe to call multiple times. If the loop is already
running, it will not do anything. | [
"Ensure",
"the",
"background",
"loop",
"is",
"running",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/async_tools/event_loop.py#L281-L295 |
def wait_for_interrupt(self, check_interval=1.0, max_time=None):
    """Run the event loop until a ctrl-c interrupt arrives or max_time passes.

    Wakes up every ``check_interval`` seconds (1 s by default) to check
    for interrupt signals or expiry of the maximum runtime.  Lowering the
    interval reduces latency, which is useful in tests, at the cost of
    more CPU wakeups in production, so 1 second is an appropriate
    production value.

    Args:
        check_interval (float): How often to wake up and check for a
            SIGTERM.  Defaults to 1 s.  Cannot be < 0.01 s.
        max_time (float): Stop the event loop after max_time seconds.
            Useful for testing.  Defaults to None, meaning run forever
            until interrupted.
    """

    self.start()

    interval = max(check_interval, 0.01)

    elapsed = 0
    try:
        while max_time is None or elapsed < max_time:
            try:
                time.sleep(interval)
            except IOError:
                # IOError comes when this call is interrupted in a signal handler
                pass

            elapsed += interval
    except KeyboardInterrupt:
        pass
self.start()
wait = max(check_interval, 0.01)
accum = 0
try:
while max_time is None or accum < max_time:
try:
time.sleep(wait)
except IOError:
pass # IOError comes when this call is interrupted in a signal handler
accum += wait
except KeyboardInterrupt:
pass | [
"def",
"wait_for_interrupt",
"(",
"self",
",",
"check_interval",
"=",
"1.0",
",",
"max_time",
"=",
"None",
")",
":",
"self",
".",
"start",
"(",
")",
"wait",
"=",
"max",
"(",
"check_interval",
",",
"0.01",
")",
"accum",
"=",
"0",
"try",
":",
"while",
... | Run the event loop until we receive a ctrl-c interrupt or max_time passes.
This method will wake up every 1 second by default to check for any
interrupt signals or if the maximum runtime has expired. This can be
set lower for testing purpose to reduce latency but in production
settings, this can cause increased CPU usage so 1 second is an
appropriate value.
Args:
check_interval (float): How often to wake up and check for
a SIGTERM. Defaults to 1s. Setting this faster is useful
for unit testing. Cannot be < 0.01 s.
max_time (float): Stop the event loop after max_time seconds.
This is useful for testing purposes. Defaults to None,
which means run forever until interrupt. | [
"Run",
"the",
"event",
"loop",
"until",
"we",
"receive",
"a",
"ctrl",
"-",
"c",
"interrupt",
"or",
"max_time",
"passes",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/async_tools/event_loop.py#L297-L329 |
def stop(self):
    """Synchronously stop the background loop from outside.

    Blocks until the background loop has completely stopped, so it must
    not be called from inside the loop itself.

    Safe to call multiple times; if the loop is not currently running,
    it returns without doing anything.

    Raises:
        InternalError: If called from inside the event loop thread,
            which would deadlock.
    """

    if not self.loop:
        return

    if self.inside_loop():
        raise InternalError("BackgroundEventLoop.stop() called from inside event loop; "
                            "would have deadlocked.")

    try:
        self.run_coroutine(self._stop_internal())
        self.thread.join()
    except:
        self._logger.exception("Error stopping BackgroundEventLoop")
        raise
    finally:
        # Reset all loop state whether shutdown succeeded or not.
        self.thread = None
        self.loop = None
        self.tasks = set()
if not self.loop:
return
if self.inside_loop():
raise InternalError("BackgroundEventLoop.stop() called from inside event loop; "
"would have deadlocked.")
try:
self.run_coroutine(self._stop_internal())
self.thread.join()
except:
self._logger.exception("Error stopping BackgroundEventLoop")
raise
finally:
self.thread = None
self.loop = None
self.tasks = set() | [
"def",
"stop",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"loop",
":",
"return",
"if",
"self",
".",
"inside_loop",
"(",
")",
":",
"raise",
"InternalError",
"(",
"\"BackgroundEventLoop.stop() called from inside event loop; \"",
"\"would have deadlocked.\"",
")"... | Synchronously stop the background loop from outside.
This method will block until the background loop is completely stopped
so it cannot be called from inside the loop itself.
This method is safe to call multiple times. If the loop is not
currently running it will return without doing anything. | [
"Synchronously",
"stop",
"the",
"background",
"loop",
"from",
"outside",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/async_tools/event_loop.py#L331-L357 |
23,674 | iotile/coretools | iotilecore/iotile/core/utilities/async_tools/event_loop.py | BackgroundEventLoop._stop_internal | async def _stop_internal(self):
"""Cleanly stop the event loop after shutting down all tasks."""
# Make sure we only try to stop once
if self.stopping is True:
return
self.stopping = True
awaitables = [task.stop() for task in self.tasks]
results = await asyncio.gather(*awaitables, return_exceptions=True)
for task, result in zip(self.tasks, results):
if isinstance(result, Exception):
self._logger.error("Error stopping task %s: %s", task.name, repr(result))
# It is important to defer this call by one loop cycle so
# that this coroutine is finalized and anyone blocking on it
# resumes execution.
self.loop.call_soon(self.loop.stop) | python | async def _stop_internal(self):
# Make sure we only try to stop once
if self.stopping is True:
return
self.stopping = True
awaitables = [task.stop() for task in self.tasks]
results = await asyncio.gather(*awaitables, return_exceptions=True)
for task, result in zip(self.tasks, results):
if isinstance(result, Exception):
self._logger.error("Error stopping task %s: %s", task.name, repr(result))
# It is important to defer this call by one loop cycle so
# that this coroutine is finalized and anyone blocking on it
# resumes execution.
self.loop.call_soon(self.loop.stop) | [
"async",
"def",
"_stop_internal",
"(",
"self",
")",
":",
"# Make sure we only try to stop once",
"if",
"self",
".",
"stopping",
"is",
"True",
":",
"return",
"self",
".",
"stopping",
"=",
"True",
"awaitables",
"=",
"[",
"task",
".",
"stop",
"(",
")",
"for",
... | Cleanly stop the event loop after shutting down all tasks. | [
"Cleanly",
"stop",
"the",
"event",
"loop",
"after",
"shutting",
"down",
"all",
"tasks",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/async_tools/event_loop.py#L388-L406 |
23,675 | iotile/coretools | iotilecore/iotile/core/utilities/async_tools/event_loop.py | BackgroundEventLoop._loop_thread_main | def _loop_thread_main(self):
"""Main background thread running the event loop."""
asyncio.set_event_loop(self.loop)
self._loop_check.inside_loop = True
try:
self._logger.debug("Starting loop in background thread")
self.loop.run_forever()
self._logger.debug("Finished loop in background thread")
except: # pylint:disable=bare-except;This is a background worker thread.
self._logger.exception("Exception raised from event loop thread")
finally:
self.loop.close() | python | def _loop_thread_main(self):
asyncio.set_event_loop(self.loop)
self._loop_check.inside_loop = True
try:
self._logger.debug("Starting loop in background thread")
self.loop.run_forever()
self._logger.debug("Finished loop in background thread")
except: # pylint:disable=bare-except;This is a background worker thread.
self._logger.exception("Exception raised from event loop thread")
finally:
self.loop.close() | [
"def",
"_loop_thread_main",
"(",
"self",
")",
":",
"asyncio",
".",
"set_event_loop",
"(",
"self",
".",
"loop",
")",
"self",
".",
"_loop_check",
".",
"inside_loop",
"=",
"True",
"try",
":",
"self",
".",
"_logger",
".",
"debug",
"(",
"\"Starting loop in backgr... | Main background thread running the event loop. | [
"Main",
"background",
"thread",
"running",
"the",
"event",
"loop",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/async_tools/event_loop.py#L408-L421 |
23,676 | iotile/coretools | iotilecore/iotile/core/utilities/async_tools/event_loop.py | BackgroundEventLoop.add_task | def add_task(self, cor, name=None, finalizer=None, stop_timeout=1.0, parent=None):
"""Schedule a task to run on the background event loop.
This method will start the given coroutine as a task and keep track
of it so that it can be properly shutdown which the event loop is
stopped.
If parent is None, the task will be stopped by calling finalizer()
inside the event loop and then awaiting the task. If finalizer is
None then task.cancel() will be called to stop the task. If finalizer
is specified, it is called with a single argument (self, this
BackgroundTask). Finalizer can be a simple function, or any
awaitable. If it is an awaitable it will be awaited.
If parent is not None, it must be a BackgroundTask object previously
created by a call to BackgroundEventLoop.add_task() and this task will be
registered as a subtask of that task. It is that task's job then to
cancel this task or otherwise stop it when it is stopped.
This method is safe to call either from inside the event loop itself
or from any other thread without fear of deadlock or race.
Args:
cor (coroutine or asyncio.Task): An asyncio Task or the coroutine
that we should execute as a task. If a coroutine is given
it is scheduled as a task in threadsafe manner automatically.
name (str): The name of the task for pretty printing and debug
purposes. If not specified, it defaults to the underlying
asyncio task object instance name.
finalizer (callable): An optional callable that should be
invoked to cancel the task. If not specified, calling stop()
will result in cancel() being called on the underlying task.
stop_timeout (float): The maximum amount of time to wait for this
task to stop when stop() is called in seconds. None indicates
an unlimited amount of time. Default is 1.
This is ignored if parent is not None.
parent (BackgroundTask): A previously created task that will take
responsibility for stopping this task when it is stopped.
Returns:
BackgroundTask: The BackgroundTask representing this task.
"""
if self.stopping:
raise LoopStoppingError("Cannot add task because loop is stopping")
# Ensure the loop exists and is started
self.start()
if parent is not None and parent not in self.tasks:
raise ArgumentError("Designated parent task {} is not registered".format(parent))
task = BackgroundTask(cor, name, finalizer, stop_timeout, loop=self)
if parent is None:
self.tasks.add(task)
self._logger.debug("Added primary task %s", task.name)
else:
parent.add_subtask(task)
self._logger.debug("Added subtask %s to parent %s", task.name, parent.name)
return task | python | def add_task(self, cor, name=None, finalizer=None, stop_timeout=1.0, parent=None):
if self.stopping:
raise LoopStoppingError("Cannot add task because loop is stopping")
# Ensure the loop exists and is started
self.start()
if parent is not None and parent not in self.tasks:
raise ArgumentError("Designated parent task {} is not registered".format(parent))
task = BackgroundTask(cor, name, finalizer, stop_timeout, loop=self)
if parent is None:
self.tasks.add(task)
self._logger.debug("Added primary task %s", task.name)
else:
parent.add_subtask(task)
self._logger.debug("Added subtask %s to parent %s", task.name, parent.name)
return task | [
"def",
"add_task",
"(",
"self",
",",
"cor",
",",
"name",
"=",
"None",
",",
"finalizer",
"=",
"None",
",",
"stop_timeout",
"=",
"1.0",
",",
"parent",
"=",
"None",
")",
":",
"if",
"self",
".",
"stopping",
":",
"raise",
"LoopStoppingError",
"(",
"\"Cannot... | Schedule a task to run on the background event loop.
This method will start the given coroutine as a task and keep track
of it so that it can be properly shutdown which the event loop is
stopped.
If parent is None, the task will be stopped by calling finalizer()
inside the event loop and then awaiting the task. If finalizer is
None then task.cancel() will be called to stop the task. If finalizer
is specified, it is called with a single argument (self, this
BackgroundTask). Finalizer can be a simple function, or any
awaitable. If it is an awaitable it will be awaited.
If parent is not None, it must be a BackgroundTask object previously
created by a call to BackgroundEventLoop.add_task() and this task will be
registered as a subtask of that task. It is that task's job then to
cancel this task or otherwise stop it when it is stopped.
This method is safe to call either from inside the event loop itself
or from any other thread without fear of deadlock or race.
Args:
cor (coroutine or asyncio.Task): An asyncio Task or the coroutine
that we should execute as a task. If a coroutine is given
it is scheduled as a task in threadsafe manner automatically.
name (str): The name of the task for pretty printing and debug
purposes. If not specified, it defaults to the underlying
asyncio task object instance name.
finalizer (callable): An optional callable that should be
invoked to cancel the task. If not specified, calling stop()
will result in cancel() being called on the underlying task.
stop_timeout (float): The maximum amount of time to wait for this
task to stop when stop() is called in seconds. None indicates
an unlimited amount of time. Default is 1.
This is ignored if parent is not None.
parent (BackgroundTask): A previously created task that will take
responsibility for stopping this task when it is stopped.
Returns:
BackgroundTask: The BackgroundTask representing this task. | [
"Schedule",
"a",
"task",
"to",
"run",
"on",
"the",
"background",
"event",
"loop",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/async_tools/event_loop.py#L424-L487 |
23,677 | iotile/coretools | iotilecore/iotile/core/utilities/async_tools/event_loop.py | BackgroundEventLoop.run_coroutine | def run_coroutine(self, cor, *args, **kwargs):
"""Run a coroutine to completion and return its result.
This method may only be called outside of the event loop.
Attempting to call it from inside the event loop would deadlock
and will raise InternalError instead.
Args:
cor (coroutine): The coroutine that we wish to run in the
background and wait until it finishes.
Returns:
object: Whatever the coroutine cor returns.
"""
if self.stopping:
raise LoopStoppingError("Could not launch coroutine because loop is shutting down: %s" % cor)
self.start()
cor = _instaniate_coroutine(cor, args, kwargs)
if self.inside_loop():
raise InternalError("BackgroundEventLoop.run_coroutine called from inside event loop, "
"would have deadlocked.")
future = self.launch_coroutine(cor)
return future.result() | python | def run_coroutine(self, cor, *args, **kwargs):
if self.stopping:
raise LoopStoppingError("Could not launch coroutine because loop is shutting down: %s" % cor)
self.start()
cor = _instaniate_coroutine(cor, args, kwargs)
if self.inside_loop():
raise InternalError("BackgroundEventLoop.run_coroutine called from inside event loop, "
"would have deadlocked.")
future = self.launch_coroutine(cor)
return future.result() | [
"def",
"run_coroutine",
"(",
"self",
",",
"cor",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"self",
".",
"stopping",
":",
"raise",
"LoopStoppingError",
"(",
"\"Could not launch coroutine because loop is shutting down: %s\"",
"%",
"cor",
")",
"sel... | Run a coroutine to completion and return its result.
This method may only be called outside of the event loop.
Attempting to call it from inside the event loop would deadlock
and will raise InternalError instead.
Args:
cor (coroutine): The coroutine that we wish to run in the
background and wait until it finishes.
Returns:
object: Whatever the coroutine cor returns. | [
"Run",
"a",
"coroutine",
"to",
"completion",
"and",
"return",
"its",
"result",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/async_tools/event_loop.py#L489-L516 |
23,678 | iotile/coretools | iotilecore/iotile/core/utilities/async_tools/event_loop.py | BackgroundEventLoop.log_coroutine | def log_coroutine(self, cor, *args, **kwargs):
"""Run a coroutine logging any exception raised.
This routine will not block until the coroutine is finished
nor will it return any result. It will just log if any
exception is raised by the coroutine during operation.
It is safe to call from both inside and outside the event loop.
There is no guarantee on how soon the coroutine will be scheduled.
Args:
cor (coroutine): The coroutine that we wish to run in the
background and wait until it finishes.
"""
if self.stopping:
raise LoopStoppingError("Could not launch coroutine because loop is shutting down: %s" % cor)
self.start()
cor = _instaniate_coroutine(cor, args, kwargs)
def _run_and_log():
task = self.loop.create_task(cor)
task.add_done_callback(lambda x: _log_future_exception(x, self._logger))
if self.inside_loop():
_run_and_log()
else:
self.loop.call_soon_threadsafe(_run_and_log) | python | def log_coroutine(self, cor, *args, **kwargs):
if self.stopping:
raise LoopStoppingError("Could not launch coroutine because loop is shutting down: %s" % cor)
self.start()
cor = _instaniate_coroutine(cor, args, kwargs)
def _run_and_log():
task = self.loop.create_task(cor)
task.add_done_callback(lambda x: _log_future_exception(x, self._logger))
if self.inside_loop():
_run_and_log()
else:
self.loop.call_soon_threadsafe(_run_and_log) | [
"def",
"log_coroutine",
"(",
"self",
",",
"cor",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"self",
".",
"stopping",
":",
"raise",
"LoopStoppingError",
"(",
"\"Could not launch coroutine because loop is shutting down: %s\"",
"%",
"cor",
")",
"sel... | Run a coroutine logging any exception raised.
This routine will not block until the coroutine is finished
nor will it return any result. It will just log if any
exception is raised by the coroutine during operation.
It is safe to call from both inside and outside the event loop.
There is no guarantee on how soon the coroutine will be scheduled.
Args:
cor (coroutine): The coroutine that we wish to run in the
background and wait until it finishes. | [
"Run",
"a",
"coroutine",
"logging",
"any",
"exception",
"raised",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/async_tools/event_loop.py#L552-L582 |
23,679 | iotile/coretools | iotile_ext_cloud/iotile/cloud/config.py | link_cloud | def link_cloud(self, username=None, password=None, device_id=None):
"""Create and store a token for interacting with the IOTile Cloud API.
You will need to call link_cloud once for each virtualenv that
you create and want to use with any api calls that touch iotile cloud.
Note that this method is called on a ConfigManager instance
If you do not pass your username or password it will be prompted from
you securely on stdin.
If you are logging in for a user, the token will expire periodically and you
will have to relogin.
If you pass a device_id, you can obtain a limited token for that device
that will never expire, assuming you have access to that device.
Args:
username (string): Your iotile.cloud username. This is prompted
from stdin if not provided.
password (string): Your iotile.cloud password. This is prompted
from stdin if not provided.
device_id (int): Optional device id to obtain permanent credentials
for a device.
"""
reg = ComponentRegistry()
domain = self.get('cloud:server')
if username is None:
prompt_str = "Please enter your IOTile.cloud email: "
username = input(prompt_str)
if password is None:
prompt_str = "Please enter your IOTile.cloud password: "
password = getpass.getpass(prompt_str)
cloud = Api(domain=domain)
ok_resp = cloud.login(email=username, password=password)
if not ok_resp:
raise ArgumentError("Could not login to iotile.cloud as user %s" % username)
reg.set_config('arch:cloud_user', cloud.username)
reg.set_config('arch:cloud_token', cloud.token)
reg.set_config('arch:cloud_token_type', cloud.token_type)
if device_id is not None:
cloud = IOTileCloud()
cloud.impersonate_device(device_id) | python | def link_cloud(self, username=None, password=None, device_id=None):
reg = ComponentRegistry()
domain = self.get('cloud:server')
if username is None:
prompt_str = "Please enter your IOTile.cloud email: "
username = input(prompt_str)
if password is None:
prompt_str = "Please enter your IOTile.cloud password: "
password = getpass.getpass(prompt_str)
cloud = Api(domain=domain)
ok_resp = cloud.login(email=username, password=password)
if not ok_resp:
raise ArgumentError("Could not login to iotile.cloud as user %s" % username)
reg.set_config('arch:cloud_user', cloud.username)
reg.set_config('arch:cloud_token', cloud.token)
reg.set_config('arch:cloud_token_type', cloud.token_type)
if device_id is not None:
cloud = IOTileCloud()
cloud.impersonate_device(device_id) | [
"def",
"link_cloud",
"(",
"self",
",",
"username",
"=",
"None",
",",
"password",
"=",
"None",
",",
"device_id",
"=",
"None",
")",
":",
"reg",
"=",
"ComponentRegistry",
"(",
")",
"domain",
"=",
"self",
".",
"get",
"(",
"'cloud:server'",
")",
"if",
"user... | Create and store a token for interacting with the IOTile Cloud API.
You will need to call link_cloud once for each virtualenv that
you create and want to use with any api calls that touch iotile cloud.
Note that this method is called on a ConfigManager instance
If you do not pass your username or password it will be prompted from
you securely on stdin.
If you are logging in for a user, the token will expire periodically and you
will have to relogin.
If you pass a device_id, you can obtain a limited token for that device
that will never expire, assuming you have access to that device.
Args:
username (string): Your iotile.cloud username. This is prompted
from stdin if not provided.
password (string): Your iotile.cloud password. This is prompted
from stdin if not provided.
device_id (int): Optional device id to obtain permanent credentials
for a device. | [
"Create",
"and",
"store",
"a",
"token",
"for",
"interacting",
"with",
"the",
"IOTile",
"Cloud",
"API",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotile_ext_cloud/iotile/cloud/config.py#L14-L65 |
23,680 | iotile/coretools | iotilecore/iotile/core/utilities/kvstore_json.py | JSONKVStore._load_file | def _load_file(self):
"""Load all entries from json backing file
"""
if not os.path.exists(self.file):
return {}
with open(self.file, "r") as infile:
data = json.load(infile)
return data | python | def _load_file(self):
if not os.path.exists(self.file):
return {}
with open(self.file, "r") as infile:
data = json.load(infile)
return data | [
"def",
"_load_file",
"(",
"self",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"file",
")",
":",
"return",
"{",
"}",
"with",
"open",
"(",
"self",
".",
"file",
",",
"\"r\"",
")",
"as",
"infile",
":",
"data",
"=",
"j... | Load all entries from json backing file | [
"Load",
"all",
"entries",
"from",
"json",
"backing",
"file"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/kvstore_json.py#L48-L58 |
23,681 | iotile/coretools | iotilecore/iotile/core/utilities/kvstore_json.py | JSONKVStore._save_file | def _save_file(self, data):
"""Attempt to atomically save file by saving and then moving into position
The goal is to make it difficult for a crash to corrupt our data file since
the move operation can be made atomic if needed on mission critical filesystems.
"""
if platform.system() == 'Windows':
with open(self.file, "w") as outfile:
json.dump(data, outfile)
else:
newpath = self.file + '.new'
with open(newpath, "w") as outfile:
json.dump(data, outfile)
os.rename(
os.path.realpath(newpath),
os.path.realpath(self.file)
) | python | def _save_file(self, data):
if platform.system() == 'Windows':
with open(self.file, "w") as outfile:
json.dump(data, outfile)
else:
newpath = self.file + '.new'
with open(newpath, "w") as outfile:
json.dump(data, outfile)
os.rename(
os.path.realpath(newpath),
os.path.realpath(self.file)
) | [
"def",
"_save_file",
"(",
"self",
",",
"data",
")",
":",
"if",
"platform",
".",
"system",
"(",
")",
"==",
"'Windows'",
":",
"with",
"open",
"(",
"self",
".",
"file",
",",
"\"w\"",
")",
"as",
"outfile",
":",
"json",
".",
"dump",
"(",
"data",
",",
... | Attempt to atomically save file by saving and then moving into position
The goal is to make it difficult for a crash to corrupt our data file since
the move operation can be made atomic if needed on mission critical filesystems. | [
"Attempt",
"to",
"atomically",
"save",
"file",
"by",
"saving",
"and",
"then",
"moving",
"into",
"position"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/kvstore_json.py#L60-L79 |
23,682 | iotile/coretools | iotilecore/iotile/core/utilities/kvstore_json.py | JSONKVStore.remove | def remove(self, key):
"""Remove a key from the data store
Args:
key (string): The key to remove
Raises:
KeyError: if the key was not found
"""
data = self._load_file()
del data[key]
self._save_file(data) | python | def remove(self, key):
data = self._load_file()
del data[key]
self._save_file(data) | [
"def",
"remove",
"(",
"self",
",",
"key",
")",
":",
"data",
"=",
"self",
".",
"_load_file",
"(",
")",
"del",
"data",
"[",
"key",
"]",
"self",
".",
"_save_file",
"(",
"data",
")"
] | Remove a key from the data store
Args:
key (string): The key to remove
Raises:
KeyError: if the key was not found | [
"Remove",
"a",
"key",
"from",
"the",
"data",
"store"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/kvstore_json.py#L107-L119 |
23,683 | iotile/coretools | iotilecore/iotile/core/utilities/kvstore_json.py | JSONKVStore.set | def set(self, key, value):
"""Set the value of a key
Args:
key (string): The key used to store this value
value (string): The value to store
"""
data = self._load_file()
data[key] = value
self._save_file(data) | python | def set(self, key, value):
data = self._load_file()
data[key] = value
self._save_file(data) | [
"def",
"set",
"(",
"self",
",",
"key",
",",
"value",
")",
":",
"data",
"=",
"self",
".",
"_load_file",
"(",
")",
"data",
"[",
"key",
"]",
"=",
"value",
"self",
".",
"_save_file",
"(",
"data",
")"
] | Set the value of a key
Args:
key (string): The key used to store this value
value (string): The value to store | [
"Set",
"the",
"value",
"of",
"a",
"key"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/kvstore_json.py#L134-L144 |
23,684 | iotile/coretools | iotilesensorgraph/iotile/sg/parser/scopes/trigger_scope.py | TriggerScope.trigger_chain | def trigger_chain(self):
"""Return a NodeInput tuple for creating a node.
Returns:
(StreamIdentifier, InputTrigger)
"""
trigger_stream = self.allocator.attach_stream(self.trigger_stream)
return (trigger_stream, self.trigger_cond) | python | def trigger_chain(self):
trigger_stream = self.allocator.attach_stream(self.trigger_stream)
return (trigger_stream, self.trigger_cond) | [
"def",
"trigger_chain",
"(",
"self",
")",
":",
"trigger_stream",
"=",
"self",
".",
"allocator",
".",
"attach_stream",
"(",
"self",
".",
"trigger_stream",
")",
"return",
"(",
"trigger_stream",
",",
"self",
".",
"trigger_cond",
")"
] | Return a NodeInput tuple for creating a node.
Returns:
(StreamIdentifier, InputTrigger) | [
"Return",
"a",
"NodeInput",
"tuple",
"for",
"creating",
"a",
"node",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/parser/scopes/trigger_scope.py#L36-L44 |
23,685 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/cc.py | generate | def generate(env):
"""
Add Builders and construction variables for C compilers to an Environment.
"""
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
for suffix in CSuffixes:
static_obj.add_action(suffix, SCons.Defaults.CAction)
shared_obj.add_action(suffix, SCons.Defaults.ShCAction)
static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter)
add_common_cc_variables(env)
if 'CC' not in env:
env['CC'] = env.Detect(compilers) or compilers[0]
env['CFLAGS'] = SCons.Util.CLVar('')
env['CCCOM'] = '$CC -o $TARGET -c $CFLAGS $CCFLAGS $_CCCOMCOM $SOURCES'
env['SHCC'] = '$CC'
env['SHCFLAGS'] = SCons.Util.CLVar('$CFLAGS')
env['SHCCCOM'] = '$SHCC -o $TARGET -c $SHCFLAGS $SHCCFLAGS $_CCCOMCOM $SOURCES'
env['CPPDEFPREFIX'] = '-D'
env['CPPDEFSUFFIX'] = ''
env['INCPREFIX'] = '-I'
env['INCSUFFIX'] = ''
env['SHOBJSUFFIX'] = '.os'
env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 0
env['CFILESUFFIX'] = '.c' | python | def generate(env):
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
for suffix in CSuffixes:
static_obj.add_action(suffix, SCons.Defaults.CAction)
shared_obj.add_action(suffix, SCons.Defaults.ShCAction)
static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter)
add_common_cc_variables(env)
if 'CC' not in env:
env['CC'] = env.Detect(compilers) or compilers[0]
env['CFLAGS'] = SCons.Util.CLVar('')
env['CCCOM'] = '$CC -o $TARGET -c $CFLAGS $CCFLAGS $_CCCOMCOM $SOURCES'
env['SHCC'] = '$CC'
env['SHCFLAGS'] = SCons.Util.CLVar('$CFLAGS')
env['SHCCCOM'] = '$SHCC -o $TARGET -c $SHCFLAGS $SHCCFLAGS $_CCCOMCOM $SOURCES'
env['CPPDEFPREFIX'] = '-D'
env['CPPDEFSUFFIX'] = ''
env['INCPREFIX'] = '-I'
env['INCSUFFIX'] = ''
env['SHOBJSUFFIX'] = '.os'
env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 0
env['CFILESUFFIX'] = '.c' | [
"def",
"generate",
"(",
"env",
")",
":",
"static_obj",
",",
"shared_obj",
"=",
"SCons",
".",
"Tool",
".",
"createObjBuilders",
"(",
"env",
")",
"for",
"suffix",
"in",
"CSuffixes",
":",
"static_obj",
".",
"add_action",
"(",
"suffix",
",",
"SCons",
".",
"D... | Add Builders and construction variables for C compilers to an Environment. | [
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"C",
"compilers",
"to",
"an",
"Environment",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/cc.py#L67-L96 |
23,686 | iotile/coretools | iotilesensorgraph/iotile/sg/scripts/iotile_sgrun.py | process_mock_rpc | def process_mock_rpc(input_string):
"""Process a mock RPC argument.
Args:
input_string (str): The input string that should be in the format
<slot id>:<rpc id> = value
"""
spec, equals, value = input_string.partition(u'=')
if len(equals) == 0:
print("Could not parse mock RPC argument: {}".format(input_string))
sys.exit(1)
try:
value = int(value.strip(), 0)
except ValueError as exc:
print("Could not parse mock RPC value: {}".format(str(exc)))
sys.exit(1)
slot, part, rpc_id = spec.partition(u":")
if len(part) == 0:
print("Could not parse mock RPC slot/rpc definition: {}".format(spec))
sys.exit(1)
try:
slot = SlotIdentifier.FromString(slot)
except ArgumentError as exc:
print("Could not parse slot id in mock RPC definition: {}".format(exc.msg))
sys.exit(1)
try:
rpc_id = int(rpc_id, 0)
except ValueError as exc:
print("Could not parse mock RPC number: {}".format(str(exc)))
sys.exit(1)
return slot, rpc_id, value | python | def process_mock_rpc(input_string):
spec, equals, value = input_string.partition(u'=')
if len(equals) == 0:
print("Could not parse mock RPC argument: {}".format(input_string))
sys.exit(1)
try:
value = int(value.strip(), 0)
except ValueError as exc:
print("Could not parse mock RPC value: {}".format(str(exc)))
sys.exit(1)
slot, part, rpc_id = spec.partition(u":")
if len(part) == 0:
print("Could not parse mock RPC slot/rpc definition: {}".format(spec))
sys.exit(1)
try:
slot = SlotIdentifier.FromString(slot)
except ArgumentError as exc:
print("Could not parse slot id in mock RPC definition: {}".format(exc.msg))
sys.exit(1)
try:
rpc_id = int(rpc_id, 0)
except ValueError as exc:
print("Could not parse mock RPC number: {}".format(str(exc)))
sys.exit(1)
return slot, rpc_id, value | [
"def",
"process_mock_rpc",
"(",
"input_string",
")",
":",
"spec",
",",
"equals",
",",
"value",
"=",
"input_string",
".",
"partition",
"(",
"u'='",
")",
"if",
"len",
"(",
"equals",
")",
"==",
"0",
":",
"print",
"(",
"\"Could not parse mock RPC argument: {}\"",
... | Process a mock RPC argument.
Args:
input_string (str): The input string that should be in the format
<slot id>:<rpc id> = value | [
"Process",
"a",
"mock",
"RPC",
"argument",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/scripts/iotile_sgrun.py#L77-L114 |
23,687 | iotile/coretools | iotilesensorgraph/iotile/sg/scripts/iotile_sgrun.py | watch_printer | def watch_printer(watch, value):
"""Print a watched value.
Args:
watch (DataStream): The stream that was watched
value (IOTileReading): The value to was seen
"""
print("({: 8} s) {}: {}".format(value.raw_time, watch, value.value)) | python | def watch_printer(watch, value):
print("({: 8} s) {}: {}".format(value.raw_time, watch, value.value)) | [
"def",
"watch_printer",
"(",
"watch",
",",
"value",
")",
":",
"print",
"(",
"\"({: 8} s) {}: {}\"",
".",
"format",
"(",
"value",
".",
"raw_time",
",",
"watch",
",",
"value",
".",
"value",
")",
")"
] | Print a watched value.
Args:
watch (DataStream): The stream that was watched
value (IOTileReading): The value to was seen | [
"Print",
"a",
"watched",
"value",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/scripts/iotile_sgrun.py#L116-L124 |
23,688 | iotile/coretools | iotilesensorgraph/iotile/sg/scripts/iotile_sgrun.py | main | def main(argv=None):
"""Main entry point for iotile sensorgraph simulator.
This is the iotile-sgrun command line program. It takes
an optional set of command line parameters to allow for
testing.
Args:
argv (list of str): An optional set of command line
parameters. If not passed, these are taken from
sys.argv.
"""
if argv is None:
argv = sys.argv[1:]
try:
executor = None
parser = build_args()
args = parser.parse_args(args=argv)
model = DeviceModel()
parser = SensorGraphFileParser()
parser.parse_file(args.sensor_graph)
parser.compile(model)
if not args.disable_optimizer:
opt = SensorGraphOptimizer()
opt.optimize(parser.sensor_graph, model=model)
graph = parser.sensor_graph
sim = SensorGraphSimulator(graph)
for stop in args.stop:
sim.stop_condition(stop)
for watch in args.watch:
watch_sel = DataStreamSelector.FromString(watch)
graph.sensor_log.watch(watch_sel, watch_printer)
# If we are semihosting, create the appropriate executor connected to the device
if args.semihost_device is not None:
executor = SemihostedRPCExecutor(args.port, args.semihost_device)
sim.rpc_executor = executor
for mock in args.mock_rpc:
slot, rpc_id, value = process_mock_rpc(mock)
sim.rpc_executor.mock(slot, rpc_id, value)
for stim in args.stimulus:
sim.stimulus(stim)
graph.load_constants()
if args.trace is not None:
sim.record_trace()
try:
if args.connected:
sim.step(user_connected, 8)
sim.run(accelerated=not args.realtime)
except KeyboardInterrupt:
pass
if args.trace is not None:
sim.trace.save(args.trace)
finally:
if executor is not None:
executor.hw.close()
return 0 | python | def main(argv=None):
if argv is None:
argv = sys.argv[1:]
try:
executor = None
parser = build_args()
args = parser.parse_args(args=argv)
model = DeviceModel()
parser = SensorGraphFileParser()
parser.parse_file(args.sensor_graph)
parser.compile(model)
if not args.disable_optimizer:
opt = SensorGraphOptimizer()
opt.optimize(parser.sensor_graph, model=model)
graph = parser.sensor_graph
sim = SensorGraphSimulator(graph)
for stop in args.stop:
sim.stop_condition(stop)
for watch in args.watch:
watch_sel = DataStreamSelector.FromString(watch)
graph.sensor_log.watch(watch_sel, watch_printer)
# If we are semihosting, create the appropriate executor connected to the device
if args.semihost_device is not None:
executor = SemihostedRPCExecutor(args.port, args.semihost_device)
sim.rpc_executor = executor
for mock in args.mock_rpc:
slot, rpc_id, value = process_mock_rpc(mock)
sim.rpc_executor.mock(slot, rpc_id, value)
for stim in args.stimulus:
sim.stimulus(stim)
graph.load_constants()
if args.trace is not None:
sim.record_trace()
try:
if args.connected:
sim.step(user_connected, 8)
sim.run(accelerated=not args.realtime)
except KeyboardInterrupt:
pass
if args.trace is not None:
sim.trace.save(args.trace)
finally:
if executor is not None:
executor.hw.close()
return 0 | [
"def",
"main",
"(",
"argv",
"=",
"None",
")",
":",
"if",
"argv",
"is",
"None",
":",
"argv",
"=",
"sys",
".",
"argv",
"[",
"1",
":",
"]",
"try",
":",
"executor",
"=",
"None",
"parser",
"=",
"build_args",
"(",
")",
"args",
"=",
"parser",
".",
"pa... | Main entry point for iotile sensorgraph simulator.
This is the iotile-sgrun command line program. It takes
an optional set of command line parameters to allow for
testing.
Args:
argv (list of str): An optional set of command line
parameters. If not passed, these are taken from
sys.argv. | [
"Main",
"entry",
"point",
"for",
"iotile",
"sensorgraph",
"simulator",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/scripts/iotile_sgrun.py#L127-L199 |
23,689 | iotile/coretools | iotileship/iotile/ship/actions/verify_device_step.py | VerifyDeviceStep._verify_tile_versions | def _verify_tile_versions(self, hw):
"""Verify that the tiles have the correct versions
"""
for tile, expected_tile_version in self._tile_versions.items():
actual_tile_version = str(hw.get(tile).tile_version())
if expected_tile_version != actual_tile_version:
raise ArgumentError("Tile has incorrect firmware", tile=tile, \
expected_version=expected_tile_version, actual_version=actual_tile_version) | python | def _verify_tile_versions(self, hw):
for tile, expected_tile_version in self._tile_versions.items():
actual_tile_version = str(hw.get(tile).tile_version())
if expected_tile_version != actual_tile_version:
raise ArgumentError("Tile has incorrect firmware", tile=tile, \
expected_version=expected_tile_version, actual_version=actual_tile_version) | [
"def",
"_verify_tile_versions",
"(",
"self",
",",
"hw",
")",
":",
"for",
"tile",
",",
"expected_tile_version",
"in",
"self",
".",
"_tile_versions",
".",
"items",
"(",
")",
":",
"actual_tile_version",
"=",
"str",
"(",
"hw",
".",
"get",
"(",
"tile",
")",
"... | Verify that the tiles have the correct versions | [
"Verify",
"that",
"the",
"tiles",
"have",
"the",
"correct",
"versions"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileship/iotile/ship/actions/verify_device_step.py#L33-L40 |
23,690 | iotile/coretools | iotileship/iotile/ship/actions/verify_device_step.py | VerifyDeviceStep._verify_realtime_streams | def _verify_realtime_streams(self, hw):
"""Check that the realtime streams are being produced
"""
print("--> Testing realtime data (takes 2 seconds)")
time.sleep(2.1)
reports = [x for x in hw.iter_reports()]
reports_seen = {key: 0 for key in self._realtime_streams}
for report in reports:
stream_value = report.visible_readings[0].stream
if reports_seen.get(stream_value) is not None:
reports_seen[stream_value] += 1
for stream in reports_seen.keys():
if reports_seen[stream] < 2:
raise ArgumentError("Realtime Stream not pushing any reports", stream=hex(stream), \
reports_seen=reports_seen[stream]) | python | def _verify_realtime_streams(self, hw):
print("--> Testing realtime data (takes 2 seconds)")
time.sleep(2.1)
reports = [x for x in hw.iter_reports()]
reports_seen = {key: 0 for key in self._realtime_streams}
for report in reports:
stream_value = report.visible_readings[0].stream
if reports_seen.get(stream_value) is not None:
reports_seen[stream_value] += 1
for stream in reports_seen.keys():
if reports_seen[stream] < 2:
raise ArgumentError("Realtime Stream not pushing any reports", stream=hex(stream), \
reports_seen=reports_seen[stream]) | [
"def",
"_verify_realtime_streams",
"(",
"self",
",",
"hw",
")",
":",
"print",
"(",
"\"--> Testing realtime data (takes 2 seconds)\"",
")",
"time",
".",
"sleep",
"(",
"2.1",
")",
"reports",
"=",
"[",
"x",
"for",
"x",
"in",
"hw",
".",
"iter_reports",
"(",
")",... | Check that the realtime streams are being produced | [
"Check",
"that",
"the",
"realtime",
"streams",
"are",
"being",
"produced"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileship/iotile/ship/actions/verify_device_step.py#L64-L80 |
23,691 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/xgettext.py | _update_pot_file | def _update_pot_file(target, source, env):
""" Action function for `POTUpdate` builder """
import re
import os
import SCons.Action
nop = lambda target, source, env: 0
# Save scons cwd and os cwd (NOTE: they may be different. After the job, we
# revert each one to its original state).
save_cwd = env.fs.getcwd()
save_os_cwd = os.getcwd()
chdir = target[0].dir
chdir_str = repr(chdir.get_abspath())
# Print chdir message (employ SCons.Action.Action for that. It knows better
# than me how to to this correctly).
env.Execute(SCons.Action.Action(nop, "Entering " + chdir_str))
# Go to target's directory and do our job
env.fs.chdir(chdir, 1) # Go into target's directory
try:
cmd = _CmdRunner('$XGETTEXTCOM', '$XGETTEXTCOMSTR')
action = SCons.Action.Action(cmd, strfunction=cmd.strfunction)
status = action([target[0]], source, env)
except:
# Something went wrong.
env.Execute(SCons.Action.Action(nop, "Leaving " + chdir_str))
# Revert working dirs to previous state and re-throw exception.
env.fs.chdir(save_cwd, 0)
os.chdir(save_os_cwd)
raise
# Print chdir message.
env.Execute(SCons.Action.Action(nop, "Leaving " + chdir_str))
# Revert working dirs to previous state.
env.fs.chdir(save_cwd, 0)
os.chdir(save_os_cwd)
# If the command was not successfull, return error code.
if status: return status
new_content = cmd.out
if not new_content:
# When xgettext finds no internationalized messages, no *.pot is created
# (because we don't want to bother translators with empty POT files).
needs_update = False
explain = "no internationalized messages encountered"
else:
if target[0].exists():
# If the file already exists, it's left unaltered unless its messages
# are outdated (w.r.t. to these recovered by xgettext from sources).
old_content = target[0].get_text_contents()
re_cdate = re.compile(r'^"POT-Creation-Date: .*"$[\r\n]?', re.M)
old_content_nocdate = re.sub(re_cdate, "", old_content)
new_content_nocdate = re.sub(re_cdate, "", new_content)
if (old_content_nocdate == new_content_nocdate):
# Messages are up-to-date
needs_update = False
explain = "messages in file found to be up-to-date"
else:
# Messages are outdated
needs_update = True
explain = "messages in file were outdated"
else:
# No POT file found, create new one
needs_update = True
explain = "new file"
if needs_update:
# Print message employing SCons.Action.Action for that.
msg = "Writing " + repr(str(target[0])) + " (" + explain + ")"
env.Execute(SCons.Action.Action(nop, msg))
f = open(str(target[0]), "w")
f.write(new_content)
f.close()
return 0
else:
# Print message employing SCons.Action.Action for that.
msg = "Not writing " + repr(str(target[0])) + " (" + explain + ")"
env.Execute(SCons.Action.Action(nop, msg))
return 0 | python | def _update_pot_file(target, source, env):
import re
import os
import SCons.Action
nop = lambda target, source, env: 0
# Save scons cwd and os cwd (NOTE: they may be different. After the job, we
# revert each one to its original state).
save_cwd = env.fs.getcwd()
save_os_cwd = os.getcwd()
chdir = target[0].dir
chdir_str = repr(chdir.get_abspath())
# Print chdir message (employ SCons.Action.Action for that. It knows better
# than me how to to this correctly).
env.Execute(SCons.Action.Action(nop, "Entering " + chdir_str))
# Go to target's directory and do our job
env.fs.chdir(chdir, 1) # Go into target's directory
try:
cmd = _CmdRunner('$XGETTEXTCOM', '$XGETTEXTCOMSTR')
action = SCons.Action.Action(cmd, strfunction=cmd.strfunction)
status = action([target[0]], source, env)
except:
# Something went wrong.
env.Execute(SCons.Action.Action(nop, "Leaving " + chdir_str))
# Revert working dirs to previous state and re-throw exception.
env.fs.chdir(save_cwd, 0)
os.chdir(save_os_cwd)
raise
# Print chdir message.
env.Execute(SCons.Action.Action(nop, "Leaving " + chdir_str))
# Revert working dirs to previous state.
env.fs.chdir(save_cwd, 0)
os.chdir(save_os_cwd)
# If the command was not successfull, return error code.
if status: return status
new_content = cmd.out
if not new_content:
# When xgettext finds no internationalized messages, no *.pot is created
# (because we don't want to bother translators with empty POT files).
needs_update = False
explain = "no internationalized messages encountered"
else:
if target[0].exists():
# If the file already exists, it's left unaltered unless its messages
# are outdated (w.r.t. to these recovered by xgettext from sources).
old_content = target[0].get_text_contents()
re_cdate = re.compile(r'^"POT-Creation-Date: .*"$[\r\n]?', re.M)
old_content_nocdate = re.sub(re_cdate, "", old_content)
new_content_nocdate = re.sub(re_cdate, "", new_content)
if (old_content_nocdate == new_content_nocdate):
# Messages are up-to-date
needs_update = False
explain = "messages in file found to be up-to-date"
else:
# Messages are outdated
needs_update = True
explain = "messages in file were outdated"
else:
# No POT file found, create new one
needs_update = True
explain = "new file"
if needs_update:
# Print message employing SCons.Action.Action for that.
msg = "Writing " + repr(str(target[0])) + " (" + explain + ")"
env.Execute(SCons.Action.Action(nop, msg))
f = open(str(target[0]), "w")
f.write(new_content)
f.close()
return 0
else:
# Print message employing SCons.Action.Action for that.
msg = "Not writing " + repr(str(target[0])) + " (" + explain + ")"
env.Execute(SCons.Action.Action(nop, msg))
return 0 | [
"def",
"_update_pot_file",
"(",
"target",
",",
"source",
",",
"env",
")",
":",
"import",
"re",
"import",
"os",
"import",
"SCons",
".",
"Action",
"nop",
"=",
"lambda",
"target",
",",
"source",
",",
"env",
":",
"0",
"# Save scons cwd and os cwd (NOTE: they may b... | Action function for `POTUpdate` builder | [
"Action",
"function",
"for",
"POTUpdate",
"builder"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/xgettext.py#L75-L151 |
23,692 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/xgettext.py | _scan_xgettext_from_files | def _scan_xgettext_from_files(target, source, env, files=None, path=None):
""" Parses `POTFILES.in`-like file and returns list of extracted file names.
"""
import re
import SCons.Util
import SCons.Node.FS
if files is None:
return 0
if not SCons.Util.is_List(files):
files = [files]
if path is None:
if 'XGETTEXTPATH' in env:
path = env['XGETTEXTPATH']
else:
path = []
if not SCons.Util.is_List(path):
path = [path]
path = SCons.Util.flatten(path)
dirs = ()
for p in path:
if not isinstance(p, SCons.Node.FS.Base):
if SCons.Util.is_String(p):
p = env.subst(p, source=source, target=target)
p = env.arg2nodes(p, env.fs.Dir)
dirs += tuple(p)
# cwd is the default search path (when no path is defined by user)
if not dirs:
dirs = (env.fs.getcwd(),)
# Parse 'POTFILE.in' files.
re_comment = re.compile(r'^#[^\n\r]*$\r?\n?', re.M)
re_emptyln = re.compile(r'^[ \t\r]*$\r?\n?', re.M)
re_trailws = re.compile(r'[ \t\r]+$')
for f in files:
# Find files in search path $XGETTEXTPATH
if isinstance(f, SCons.Node.FS.Base) and f.rexists():
contents = f.get_text_contents()
contents = re_comment.sub("", contents)
contents = re_emptyln.sub("", contents)
contents = re_trailws.sub("", contents)
depnames = contents.splitlines()
for depname in depnames:
depfile = SCons.Node.FS.find_file(depname, dirs)
if not depfile:
depfile = env.arg2nodes(depname, dirs[0].File)
env.Depends(target, depfile)
return 0 | python | def _scan_xgettext_from_files(target, source, env, files=None, path=None):
import re
import SCons.Util
import SCons.Node.FS
if files is None:
return 0
if not SCons.Util.is_List(files):
files = [files]
if path is None:
if 'XGETTEXTPATH' in env:
path = env['XGETTEXTPATH']
else:
path = []
if not SCons.Util.is_List(path):
path = [path]
path = SCons.Util.flatten(path)
dirs = ()
for p in path:
if not isinstance(p, SCons.Node.FS.Base):
if SCons.Util.is_String(p):
p = env.subst(p, source=source, target=target)
p = env.arg2nodes(p, env.fs.Dir)
dirs += tuple(p)
# cwd is the default search path (when no path is defined by user)
if not dirs:
dirs = (env.fs.getcwd(),)
# Parse 'POTFILE.in' files.
re_comment = re.compile(r'^#[^\n\r]*$\r?\n?', re.M)
re_emptyln = re.compile(r'^[ \t\r]*$\r?\n?', re.M)
re_trailws = re.compile(r'[ \t\r]+$')
for f in files:
# Find files in search path $XGETTEXTPATH
if isinstance(f, SCons.Node.FS.Base) and f.rexists():
contents = f.get_text_contents()
contents = re_comment.sub("", contents)
contents = re_emptyln.sub("", contents)
contents = re_trailws.sub("", contents)
depnames = contents.splitlines()
for depname in depnames:
depfile = SCons.Node.FS.find_file(depname, dirs)
if not depfile:
depfile = env.arg2nodes(depname, dirs[0].File)
env.Depends(target, depfile)
return 0 | [
"def",
"_scan_xgettext_from_files",
"(",
"target",
",",
"source",
",",
"env",
",",
"files",
"=",
"None",
",",
"path",
"=",
"None",
")",
":",
"import",
"re",
"import",
"SCons",
".",
"Util",
"import",
"SCons",
".",
"Node",
".",
"FS",
"if",
"files",
"is",... | Parses `POTFILES.in`-like file and returns list of extracted file names. | [
"Parses",
"POTFILES",
".",
"in",
"-",
"like",
"file",
"and",
"returns",
"list",
"of",
"extracted",
"file",
"names",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/xgettext.py#L175-L225 |
23,693 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/xgettext.py | _pot_update_emitter | def _pot_update_emitter(target, source, env):
""" Emitter function for `POTUpdate` builder """
from SCons.Tool.GettextCommon import _POTargetFactory
import SCons.Util
import SCons.Node.FS
if 'XGETTEXTFROM' in env:
xfrom = env['XGETTEXTFROM']
else:
return target, source
if not SCons.Util.is_List(xfrom):
xfrom = [xfrom]
xfrom = SCons.Util.flatten(xfrom)
# Prepare list of 'POTFILE.in' files.
files = []
for xf in xfrom:
if not isinstance(xf, SCons.Node.FS.Base):
if SCons.Util.is_String(xf):
# Interpolate variables in strings
xf = env.subst(xf, source=source, target=target)
xf = env.arg2nodes(xf)
files.extend(xf)
if files:
env.Depends(target, files)
_scan_xgettext_from_files(target, source, env, files)
return target, source | python | def _pot_update_emitter(target, source, env):
from SCons.Tool.GettextCommon import _POTargetFactory
import SCons.Util
import SCons.Node.FS
if 'XGETTEXTFROM' in env:
xfrom = env['XGETTEXTFROM']
else:
return target, source
if not SCons.Util.is_List(xfrom):
xfrom = [xfrom]
xfrom = SCons.Util.flatten(xfrom)
# Prepare list of 'POTFILE.in' files.
files = []
for xf in xfrom:
if not isinstance(xf, SCons.Node.FS.Base):
if SCons.Util.is_String(xf):
# Interpolate variables in strings
xf = env.subst(xf, source=source, target=target)
xf = env.arg2nodes(xf)
files.extend(xf)
if files:
env.Depends(target, files)
_scan_xgettext_from_files(target, source, env, files)
return target, source | [
"def",
"_pot_update_emitter",
"(",
"target",
",",
"source",
",",
"env",
")",
":",
"from",
"SCons",
".",
"Tool",
".",
"GettextCommon",
"import",
"_POTargetFactory",
"import",
"SCons",
".",
"Util",
"import",
"SCons",
".",
"Node",
".",
"FS",
"if",
"'XGETTEXTFRO... | Emitter function for `POTUpdate` builder | [
"Emitter",
"function",
"for",
"POTUpdate",
"builder"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/xgettext.py#L231-L258 |
23,694 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/xgettext.py | _POTUpdateBuilder | def _POTUpdateBuilder(env, **kw):
""" Creates `POTUpdate` builder object """
import SCons.Action
from SCons.Tool.GettextCommon import _POTargetFactory
kw['action'] = SCons.Action.Action(_update_pot_file, None)
kw['suffix'] = '$POTSUFFIX'
kw['target_factory'] = _POTargetFactory(env, alias='$POTUPDATE_ALIAS').File
kw['emitter'] = _pot_update_emitter
return _POTBuilder(**kw) | python | def _POTUpdateBuilder(env, **kw):
import SCons.Action
from SCons.Tool.GettextCommon import _POTargetFactory
kw['action'] = SCons.Action.Action(_update_pot_file, None)
kw['suffix'] = '$POTSUFFIX'
kw['target_factory'] = _POTargetFactory(env, alias='$POTUPDATE_ALIAS').File
kw['emitter'] = _pot_update_emitter
return _POTBuilder(**kw) | [
"def",
"_POTUpdateBuilder",
"(",
"env",
",",
"*",
"*",
"kw",
")",
":",
"import",
"SCons",
".",
"Action",
"from",
"SCons",
".",
"Tool",
".",
"GettextCommon",
"import",
"_POTargetFactory",
"kw",
"[",
"'action'",
"]",
"=",
"SCons",
".",
"Action",
".",
"Acti... | Creates `POTUpdate` builder object | [
"Creates",
"POTUpdate",
"builder",
"object"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/xgettext.py#L275-L283 |
23,695 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/xgettext.py | generate | def generate(env, **kw):
""" Generate `xgettext` tool """
import SCons.Util
from SCons.Tool.GettextCommon import RPaths, _detect_xgettext
try:
env['XGETTEXT'] = _detect_xgettext(env)
except:
env['XGETTEXT'] = 'xgettext'
# NOTE: sources="$SOURCES" would work as well. However, we use following
# construction to convert absolute paths provided by scons onto paths
# relative to current working dir. Note, that scons expands $SOURCE(S) to
# absolute paths for sources $SOURCE(s) outside of current subtree (e.g. in
# "../"). With source=$SOURCE these absolute paths would be written to the
# resultant *.pot file (and its derived *.po files) as references to lines in
# source code (e.g. referring lines in *.c files). Such references would be
# correct (e.g. in poedit) only on machine on which *.pot was generated and
# would be of no use on other hosts (having a copy of source code located
# in different place in filesystem).
sources = '$( ${_concat( "", SOURCES, "", __env__, XgettextRPaths, TARGET' \
+ ', SOURCES)} $)'
# NOTE: the output from $XGETTEXTCOM command must go to stdout, not to a file.
# This is required by the POTUpdate builder's action.
xgettextcom = '$XGETTEXT $XGETTEXTFLAGS $_XGETTEXTPATHFLAGS' \
+ ' $_XGETTEXTFROMFLAGS -o - ' + sources
xgettextpathflags = '$( ${_concat( XGETTEXTPATHPREFIX, XGETTEXTPATH' \
+ ', XGETTEXTPATHSUFFIX, __env__, RDirs, TARGET, SOURCES)} $)'
xgettextfromflags = '$( ${_concat( XGETTEXTFROMPREFIX, XGETTEXTFROM' \
+ ', XGETTEXTFROMSUFFIX, __env__, target=TARGET, source=SOURCES)} $)'
env.SetDefault(
_XGETTEXTDOMAIN='${TARGET.filebase}',
XGETTEXTFLAGS=[],
XGETTEXTCOM=xgettextcom,
XGETTEXTCOMSTR='',
XGETTEXTPATH=[],
XGETTEXTPATHPREFIX='-D',
XGETTEXTPATHSUFFIX='',
XGETTEXTFROM=None,
XGETTEXTFROMPREFIX='-f',
XGETTEXTFROMSUFFIX='',
_XGETTEXTPATHFLAGS=xgettextpathflags,
_XGETTEXTFROMFLAGS=xgettextfromflags,
POTSUFFIX=['.pot'],
POTUPDATE_ALIAS='pot-update',
XgettextRPaths=RPaths(env)
)
env.Append(BUILDERS={
'_POTUpdateBuilder': _POTUpdateBuilder(env)
})
env.AddMethod(_POTUpdateBuilderWrapper, 'POTUpdate')
env.AlwaysBuild(env.Alias('$POTUPDATE_ALIAS')) | python | def generate(env, **kw):
import SCons.Util
from SCons.Tool.GettextCommon import RPaths, _detect_xgettext
try:
env['XGETTEXT'] = _detect_xgettext(env)
except:
env['XGETTEXT'] = 'xgettext'
# NOTE: sources="$SOURCES" would work as well. However, we use following
# construction to convert absolute paths provided by scons onto paths
# relative to current working dir. Note, that scons expands $SOURCE(S) to
# absolute paths for sources $SOURCE(s) outside of current subtree (e.g. in
# "../"). With source=$SOURCE these absolute paths would be written to the
# resultant *.pot file (and its derived *.po files) as references to lines in
# source code (e.g. referring lines in *.c files). Such references would be
# correct (e.g. in poedit) only on machine on which *.pot was generated and
# would be of no use on other hosts (having a copy of source code located
# in different place in filesystem).
sources = '$( ${_concat( "", SOURCES, "", __env__, XgettextRPaths, TARGET' \
+ ', SOURCES)} $)'
# NOTE: the output from $XGETTEXTCOM command must go to stdout, not to a file.
# This is required by the POTUpdate builder's action.
xgettextcom = '$XGETTEXT $XGETTEXTFLAGS $_XGETTEXTPATHFLAGS' \
+ ' $_XGETTEXTFROMFLAGS -o - ' + sources
xgettextpathflags = '$( ${_concat( XGETTEXTPATHPREFIX, XGETTEXTPATH' \
+ ', XGETTEXTPATHSUFFIX, __env__, RDirs, TARGET, SOURCES)} $)'
xgettextfromflags = '$( ${_concat( XGETTEXTFROMPREFIX, XGETTEXTFROM' \
+ ', XGETTEXTFROMSUFFIX, __env__, target=TARGET, source=SOURCES)} $)'
env.SetDefault(
_XGETTEXTDOMAIN='${TARGET.filebase}',
XGETTEXTFLAGS=[],
XGETTEXTCOM=xgettextcom,
XGETTEXTCOMSTR='',
XGETTEXTPATH=[],
XGETTEXTPATHPREFIX='-D',
XGETTEXTPATHSUFFIX='',
XGETTEXTFROM=None,
XGETTEXTFROMPREFIX='-f',
XGETTEXTFROMSUFFIX='',
_XGETTEXTPATHFLAGS=xgettextpathflags,
_XGETTEXTFROMFLAGS=xgettextfromflags,
POTSUFFIX=['.pot'],
POTUPDATE_ALIAS='pot-update',
XgettextRPaths=RPaths(env)
)
env.Append(BUILDERS={
'_POTUpdateBuilder': _POTUpdateBuilder(env)
})
env.AddMethod(_POTUpdateBuilderWrapper, 'POTUpdate')
env.AlwaysBuild(env.Alias('$POTUPDATE_ALIAS')) | [
"def",
"generate",
"(",
"env",
",",
"*",
"*",
"kw",
")",
":",
"import",
"SCons",
".",
"Util",
"from",
"SCons",
".",
"Tool",
".",
"GettextCommon",
"import",
"RPaths",
",",
"_detect_xgettext",
"try",
":",
"env",
"[",
"'XGETTEXT'",
"]",
"=",
"_detect_xgette... | Generate `xgettext` tool | [
"Generate",
"xgettext",
"tool"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/xgettext.py#L289-L342 |
23,696 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/gcc.py | generate | def generate(env):
"""Add Builders and construction variables for gcc to an Environment."""
if 'CC' not in env:
env['CC'] = env.Detect(compilers) or compilers[0]
cc.generate(env)
if env['PLATFORM'] in ['cygwin', 'win32']:
env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS')
else:
env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS -fPIC')
# determine compiler version
version = detect_version(env, env['CC'])
if version:
env['CCVERSION'] = version | python | def generate(env):
if 'CC' not in env:
env['CC'] = env.Detect(compilers) or compilers[0]
cc.generate(env)
if env['PLATFORM'] in ['cygwin', 'win32']:
env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS')
else:
env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS -fPIC')
# determine compiler version
version = detect_version(env, env['CC'])
if version:
env['CCVERSION'] = version | [
"def",
"generate",
"(",
"env",
")",
":",
"if",
"'CC'",
"not",
"in",
"env",
":",
"env",
"[",
"'CC'",
"]",
"=",
"env",
".",
"Detect",
"(",
"compilers",
")",
"or",
"compilers",
"[",
"0",
"]",
"cc",
".",
"generate",
"(",
"env",
")",
"if",
"env",
"[... | Add Builders and construction variables for gcc to an Environment. | [
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"gcc",
"to",
"an",
"Environment",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/gcc.py#L45-L60 |
23,697 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/gcc.py | detect_version | def detect_version(env, cc):
"""Return the version of the GNU compiler, or None if it is not a GNU compiler."""
cc = env.subst(cc)
if not cc:
return None
version = None
#pipe = SCons.Action._subproc(env, SCons.Util.CLVar(cc) + ['-dumpversion'],
pipe = SCons.Action._subproc(env, SCons.Util.CLVar(cc) + ['--version'],
stdin = 'devnull',
stderr = 'devnull',
stdout = subprocess.PIPE)
# -dumpversion was added in GCC 3.0. As long as we're supporting
# GCC versions older than that, we should use --version and a
# regular expression.
#line = pipe.stdout.read().strip()
#if line:
# version = line
line = SCons.Util.to_str(pipe.stdout.readline())
match = re.search(r'[0-9]+(\.[0-9]+)+', line)
if match:
version = match.group(0)
# Non-GNU compiler's output (like AIX xlc's) may exceed the stdout buffer:
# So continue with reading to let the child process actually terminate.
while SCons.Util.to_str(pipe.stdout.readline()):
pass
ret = pipe.wait()
if ret != 0:
return None
return version | python | def detect_version(env, cc):
cc = env.subst(cc)
if not cc:
return None
version = None
#pipe = SCons.Action._subproc(env, SCons.Util.CLVar(cc) + ['-dumpversion'],
pipe = SCons.Action._subproc(env, SCons.Util.CLVar(cc) + ['--version'],
stdin = 'devnull',
stderr = 'devnull',
stdout = subprocess.PIPE)
# -dumpversion was added in GCC 3.0. As long as we're supporting
# GCC versions older than that, we should use --version and a
# regular expression.
#line = pipe.stdout.read().strip()
#if line:
# version = line
line = SCons.Util.to_str(pipe.stdout.readline())
match = re.search(r'[0-9]+(\.[0-9]+)+', line)
if match:
version = match.group(0)
# Non-GNU compiler's output (like AIX xlc's) may exceed the stdout buffer:
# So continue with reading to let the child process actually terminate.
while SCons.Util.to_str(pipe.stdout.readline()):
pass
ret = pipe.wait()
if ret != 0:
return None
return version | [
"def",
"detect_version",
"(",
"env",
",",
"cc",
")",
":",
"cc",
"=",
"env",
".",
"subst",
"(",
"cc",
")",
"if",
"not",
"cc",
":",
"return",
"None",
"version",
"=",
"None",
"#pipe = SCons.Action._subproc(env, SCons.Util.CLVar(cc) + ['-dumpversion'],",
"pipe",
"="... | Return the version of the GNU compiler, or None if it is not a GNU compiler. | [
"Return",
"the",
"version",
"of",
"the",
"GNU",
"compiler",
"or",
"None",
"if",
"it",
"is",
"not",
"a",
"GNU",
"compiler",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/gcc.py#L66-L94 |
23,698 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/packaging/msi.py | is_dos_short_file_name | def is_dos_short_file_name(file):
""" Examine if the given file is in the 8.3 form.
"""
fname, ext = os.path.splitext(file)
proper_ext = len(ext) == 0 or (2 <= len(ext) <= 4) # the ext contains the dot
proper_fname = file.isupper() and len(fname) <= 8
return proper_ext and proper_fname | python | def is_dos_short_file_name(file):
fname, ext = os.path.splitext(file)
proper_ext = len(ext) == 0 or (2 <= len(ext) <= 4) # the ext contains the dot
proper_fname = file.isupper() and len(fname) <= 8
return proper_ext and proper_fname | [
"def",
"is_dos_short_file_name",
"(",
"file",
")",
":",
"fname",
",",
"ext",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"file",
")",
"proper_ext",
"=",
"len",
"(",
"ext",
")",
"==",
"0",
"or",
"(",
"2",
"<=",
"len",
"(",
"ext",
")",
"<=",
"4",... | Examine if the given file is in the 8.3 form. | [
"Examine",
"if",
"the",
"given",
"file",
"is",
"in",
"the",
"8",
".",
"3",
"form",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/packaging/msi.py#L81-L88 |
23,699 | iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/packaging/msi.py | create_feature_dict | def create_feature_dict(files):
""" X_MSI_FEATURE and doc FileTag's can be used to collect files in a
hierarchy. This function collects the files into this hierarchy.
"""
dict = {}
def add_to_dict( feature, file ):
if not SCons.Util.is_List( feature ):
feature = [ feature ]
for f in feature:
if f not in dict:
dict[ f ] = [ file ]
else:
dict[ f ].append( file )
for file in files:
if hasattr( file, 'PACKAGING_X_MSI_FEATURE' ):
add_to_dict(file.PACKAGING_X_MSI_FEATURE, file)
elif hasattr( file, 'PACKAGING_DOC' ):
add_to_dict( 'PACKAGING_DOC', file )
else:
add_to_dict( 'default', file )
return dict | python | def create_feature_dict(files):
dict = {}
def add_to_dict( feature, file ):
if not SCons.Util.is_List( feature ):
feature = [ feature ]
for f in feature:
if f not in dict:
dict[ f ] = [ file ]
else:
dict[ f ].append( file )
for file in files:
if hasattr( file, 'PACKAGING_X_MSI_FEATURE' ):
add_to_dict(file.PACKAGING_X_MSI_FEATURE, file)
elif hasattr( file, 'PACKAGING_DOC' ):
add_to_dict( 'PACKAGING_DOC', file )
else:
add_to_dict( 'default', file )
return dict | [
"def",
"create_feature_dict",
"(",
"files",
")",
":",
"dict",
"=",
"{",
"}",
"def",
"add_to_dict",
"(",
"feature",
",",
"file",
")",
":",
"if",
"not",
"SCons",
".",
"Util",
".",
"is_List",
"(",
"feature",
")",
":",
"feature",
"=",
"[",
"feature",
"]"... | X_MSI_FEATURE and doc FileTag's can be used to collect files in a
hierarchy. This function collects the files into this hierarchy. | [
"X_MSI_FEATURE",
"and",
"doc",
"FileTag",
"s",
"can",
"be",
"used",
"to",
"collect",
"files",
"in",
"a",
"hierarchy",
".",
"This",
"function",
"collects",
"the",
"files",
"into",
"this",
"hierarchy",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/packaging/msi.py#L128-L152 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.