repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
listlengths
20
707
docstring
stringlengths
3
17.3k
docstring_tokens
listlengths
3
222
sha
stringlengths
40
40
url
stringlengths
87
242
partition
stringclasses
1 value
idx
int64
0
252k
project-rig/rig
rig/geometry.py
spinn5_chip_coord
def spinn5_chip_coord(x, y, root_x=0, root_y=0):
    """Return the board-local coordinates of a chip.

    Translates the coordinates of a chip in a multi-board system into
    coordinates relative to the board the chip sits on.

    .. note::
        This function assumes the system is constructed from SpiNN-5
        boards.

    Parameters
    ----------
    x, y : int
        The coordinates of the chip of interest
    root_x, root_y : int
        The coordinates of the root chip (i.e. the chip used to boot the
        machine), e.g. from
        :py:attr:`rig.machine_control.MachineController.root_chip`.
    """
    # Look up the offset of this chip's Ethernet (board-root) chip in the
    # 12x12 repeating board tiling, then negate it to get the chip's
    # position within its board.
    eth_dx, eth_dy = SPINN5_ETH_OFFSET[(y - root_y) % 12][(x - root_x) % 12]
    return (-int(eth_dx), -int(eth_dy))
python
def spinn5_chip_coord(x, y, root_x=0, root_y=0): """Get the coordinates of a chip on its board. Given the coordinates of a chip in a multi-board system, calculates the coordinates of the chip within its board. .. note:: This function assumes the system is constructed from SpiNN-5 boards Parameters ---------- x, y : int The coordinates of the chip of interest root_x, root_y : int The coordinates of the root chip (i.e. the chip used to boot the machine), e.g. from :py:attr:`rig.machine_control.MachineController.root_chip`. """ dx, dy = SPINN5_ETH_OFFSET[(y - root_y) % 12][(x - root_x) % 12] return (-int(dx), -int(dy))
[ "def", "spinn5_chip_coord", "(", "x", ",", "y", ",", "root_x", "=", "0", ",", "root_y", "=", "0", ")", ":", "dx", ",", "dy", "=", "SPINN5_ETH_OFFSET", "[", "(", "y", "-", "root_y", ")", "%", "12", "]", "[", "(", "x", "-", "root_x", ")", "%", ...
Get the coordinates of a chip on its board. Given the coordinates of a chip in a multi-board system, calculates the coordinates of the chip within its board. .. note:: This function assumes the system is constructed from SpiNN-5 boards Parameters ---------- x, y : int The coordinates of the chip of interest root_x, root_y : int The coordinates of the root chip (i.e. the chip used to boot the machine), e.g. from :py:attr:`rig.machine_control.MachineController.root_chip`.
[ "Get", "the", "coordinates", "of", "a", "chip", "on", "its", "board", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/geometry.py#L405-L424
train
50,500
project-rig/rig
rig/geometry.py
spinn5_fpga_link
def spinn5_fpga_link(x, y, link, root_x=0, root_y=0):
    """Get the identity of the FPGA link which corresponds with the
    supplied link.

    .. note::
        This function assumes the system is constructed from SpiNN-5 boards
        whose FPGAs are loaded with the SpI/O 'spinnaker_fpgas' image.

    Parameters
    ----------
    x, y : int
        The chip whose link is of interest.
    link : :py:class:`~rig.links.Link`
        The link of interest.
    root_x, root_y : int
        The coordinates of the root chip (i.e. the chip used to boot the
        machine), e.g. from
        :py:attr:`rig.machine_control.MachineController.root_chip`.

    Returns
    -------
    (fpga_num, link_num) or None
        If not None, the link supplied passes through an FPGA link. The
        returned tuple indicates the FPGA responsible for the sending-side
        of the link.

        `fpga_num` is the number (0, 1 or 2) of the FPGA responsible for
        the link.

        `link_num` indicates which of the sixteen SpiNNaker links (0 to 15)
        into an FPGA is being used. Links 0-7 are typically handled by
        S-ATA link 0 and 8-15 are handled by S-ATA link 1.

        Returns None if the supplied link does not pass through an FPGA.
    """
    # Normalise to board-local coordinates; the FPGA wiring is identical
    # on every board so a single lookup table suffices.
    board_x, board_y = spinn5_chip_coord(x, y, root_x, root_y)
    return SPINN5_FPGA_LINKS.get((board_x, board_y, link))
python
def spinn5_fpga_link(x, y, link, root_x=0, root_y=0): """Get the identity of the FPGA link which corresponds with the supplied link. .. note:: This function assumes the system is constructed from SpiNN-5 boards whose FPGAs are loaded with the SpI/O 'spinnaker_fpgas' image. Parameters ---------- x, y : int The chip whose link is of interest. link : :py:class:`~rig.links.Link` The link of interest. root_x, root_y : int The coordinates of the root chip (i.e. the chip used to boot the machine), e.g. from :py:attr:`rig.machine_control.MachineController.root_chip`. Returns ------- (fpga_num, link_num) or None If not None, the link supplied passes through an FPGA link. The returned tuple indicates the FPGA responsible for the sending-side of the link. `fpga_num` is the number (0, 1 or 2) of the FPGA responsible for the link. `link_num` indicates which of the sixteen SpiNNaker links (0 to 15) into an FPGA is being used. Links 0-7 are typically handled by S-ATA link 0 and 8-15 are handled by S-ATA link 1. Returns None if the supplied link does not pass through an FPGA. """ x, y = spinn5_chip_coord(x, y, root_x, root_y) return SPINN5_FPGA_LINKS.get((x, y, link))
[ "def", "spinn5_fpga_link", "(", "x", ",", "y", ",", "link", ",", "root_x", "=", "0", ",", "root_y", "=", "0", ")", ":", "x", ",", "y", "=", "spinn5_chip_coord", "(", "x", ",", "y", ",", "root_x", ",", "root_y", ")", "return", "SPINN5_FPGA_LINKS", "...
Get the identity of the FPGA link which corresponds with the supplied link. .. note:: This function assumes the system is constructed from SpiNN-5 boards whose FPGAs are loaded with the SpI/O 'spinnaker_fpgas' image. Parameters ---------- x, y : int The chip whose link is of interest. link : :py:class:`~rig.links.Link` The link of interest. root_x, root_y : int The coordinates of the root chip (i.e. the chip used to boot the machine), e.g. from :py:attr:`rig.machine_control.MachineController.root_chip`. Returns ------- (fpga_num, link_num) or None If not None, the link supplied passes through an FPGA link. The returned tuple indicates the FPGA responsible for the sending-side of the link. `fpga_num` is the number (0, 1 or 2) of the FPGA responsible for the link. `link_num` indicates which of the sixteen SpiNNaker links (0 to 15) into an FPGA is being used. Links 0-7 are typically handled by S-ATA link 0 and 8-15 are handled by S-ATA link 1. Returns None if the supplied link does not pass through an FPGA.
[ "Get", "the", "identity", "of", "the", "FPGA", "link", "which", "corresponds", "with", "the", "supplied", "link", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/geometry.py#L427-L463
train
50,501
Metatab/metapack
metapack/package/csv.py
CsvPackageBuilder._load_resource
def _load_resource(self, source_r, abs_path=False):
    """The CSV package has no resources, so we just need to resolve the
    URLs to them. Usually, the CSV package is built from a file system
    package on a publicly accessible server.

    :param source_r: resource from the source package whose URL is to be
        resolved against this package's resource root.
    :param abs_path: unused here; kept for interface compatibility with
        the other package builders.
    """
    # Find the matching resource in this package's document and rewrite
    # its URL to point at the (remote) resource root.
    r = self.doc.resource(source_r.name)
    r.url = self.resource_root.join(r.url).inner
python
def _load_resource(self, source_r, abs_path=False): """The CSV package has no reseources, so we just need to resolve the URLs to them. Usually, the CSV package is built from a file system ackage on a publically acessible server. """ r = self.doc.resource(source_r.name) r.url = self.resource_root.join(r.url).inner
[ "def", "_load_resource", "(", "self", ",", "source_r", ",", "abs_path", "=", "False", ")", ":", "r", "=", "self", ".", "doc", ".", "resource", "(", "source_r", ".", "name", ")", "r", ".", "url", "=", "self", ".", "resource_root", ".", "join", "(", ...
The CSV package has no reseources, so we just need to resolve the URLs to them. Usually, the CSV package is built from a file system ackage on a publically acessible server.
[ "The", "CSV", "package", "has", "no", "reseources", "so", "we", "just", "need", "to", "resolve", "the", "URLs", "to", "them", ".", "Usually", "the", "CSV", "package", "is", "built", "from", "a", "file", "system", "ackage", "on", "a", "publically", "acess...
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/package/csv.py#L51-L57
train
50,502
NicolasLM/spinach
spinach/engine.py
Engine.attach_tasks
def attach_tasks(self, tasks: Tasks):
    """Attach a set of tasks.

    A task cannot be scheduled or executed before it is attached to an
    Engine.

    >>> tasks = Tasks()
    >>> spin.attach_tasks(tasks)
    """
    # Warn (but do not fail) when the task set is already bound elsewhere.
    attached_elsewhere = (tasks._spin is not None
                          and tasks._spin is not self)
    if attached_elsewhere:
        logger.warning('Tasks already attached to a different Engine')
    self._tasks.update(tasks)
    tasks._spin = self
python
def attach_tasks(self, tasks: Tasks): """Attach a set of tasks. A task cannot be scheduled or executed before it is attached to an Engine. >>> tasks = Tasks() >>> spin.attach_tasks(tasks) """ if tasks._spin is not None and tasks._spin is not self: logger.warning('Tasks already attached to a different Engine') self._tasks.update(tasks) tasks._spin = self
[ "def", "attach_tasks", "(", "self", ",", "tasks", ":", "Tasks", ")", ":", "if", "tasks", ".", "_spin", "is", "not", "None", "and", "tasks", ".", "_spin", "is", "not", "self", ":", "logger", ".", "warning", "(", "'Tasks already attached to a different Engine'...
Attach a set of tasks. A task cannot be scheduled or executed before it is attached to an Engine. >>> tasks = Tasks() >>> spin.attach_tasks(tasks)
[ "Attach", "a", "set", "of", "tasks", "." ]
0122f916643101eab5cdc1f3da662b9446e372aa
https://github.com/NicolasLM/spinach/blob/0122f916643101eab5cdc1f3da662b9446e372aa/spinach/engine.py#L47-L59
train
50,503
NicolasLM/spinach
spinach/engine.py
Engine.schedule_batch
def schedule_batch(self, batch: Batch):
    """Schedule many jobs at once.

    Scheduling jobs in batches allows to enqueue them fast by avoiding
    round-trips to the broker.

    :arg batch: :class:`Batch` instance containing jobs to schedule
    """
    # Materialise every pending job description into a Job object, then
    # hand the whole list to the broker in a single call.
    jobs = []
    for task_name, at, args, kwargs in batch.jobs_to_create:
        task = self._tasks.get(task_name)
        jobs.append(
            Job(task.name, task.queue, at, task.max_retries,
                task_args=args, task_kwargs=kwargs)
        )
    return self._broker.enqueue_jobs(jobs)
python
def schedule_batch(self, batch: Batch): """Schedule many jobs at once. Scheduling jobs in batches allows to enqueue them fast by avoiding round-trips to the broker. :arg batch: :class:`Batch` instance containing jobs to schedule """ jobs = list() for task, at, args, kwargs in batch.jobs_to_create: task = self._tasks.get(task) jobs.append( Job(task.name, task.queue, at, task.max_retries, task_args=args, task_kwargs=kwargs) ) return self._broker.enqueue_jobs(jobs)
[ "def", "schedule_batch", "(", "self", ",", "batch", ":", "Batch", ")", ":", "jobs", "=", "list", "(", ")", "for", "task", ",", "at", ",", "args", ",", "kwargs", "in", "batch", ".", "jobs_to_create", ":", "task", "=", "self", ".", "_tasks", ".", "ge...
Schedule many jobs at once. Scheduling jobs in batches allows to enqueue them fast by avoiding round-trips to the broker. :arg batch: :class:`Batch` instance containing jobs to schedule
[ "Schedule", "many", "jobs", "at", "once", "." ]
0122f916643101eab5cdc1f3da662b9446e372aa
https://github.com/NicolasLM/spinach/blob/0122f916643101eab5cdc1f3da662b9446e372aa/spinach/engine.py#L90-L105
train
50,504
NicolasLM/spinach
spinach/engine.py
Engine.start_workers
def start_workers(self, number: int=DEFAULT_WORKER_NUMBER,
                  queue=DEFAULT_QUEUE, block=True,
                  stop_when_queue_empty=False):
    """Start the worker threads.

    :arg number: number of worker threads to launch
    :arg queue: name of the queue to consume, see :doc:`queues`
    :arg block: whether to block the calling thread until a signal
         arrives and workers get terminated
    :arg stop_when_queue_empty: automatically stop the workers when the
         queue is empty. Useful mostly for one-off scripts and testing.
    """
    # Refuse to start twice: both the arbiter and the workers must have
    # been torn down (stop_workers) before a new start is allowed.
    if self._arbiter or self._workers:
        raise RuntimeError('Workers are already running')
    self._working_queue = queue
    # List the tasks bound to the queue being consumed, for the log line.
    tasks_names = '\n'.join(
        [' - ' + task.name
         for task in self._tasks.tasks.values()
         if task.queue == self._working_queue]
    )
    logger.info('Starting %d workers on queue "%s" with tasks:\n%s',
                number, self._working_queue, tasks_names)
    # Start the broker
    self._broker.start()
    # Start workers
    self._workers = Workers(
        num_workers=number,
        namespace=self.namespace,
    )
    # Start the result notifier
    self._result_notifier = threading.Thread(
        target=run_forever,
        args=(self._result_notifier_func, self._must_stop, logger),
        name='{}-result-notifier'.format(self.namespace)
    )
    self._result_notifier.start()
    # Start the arbiter
    self._arbiter = threading.Thread(
        target=run_forever,
        args=(self._arbiter_func, self._must_stop, logger,
              stop_when_queue_empty),
        name='{}-arbiter'.format(self.namespace)
    )
    self._arbiter.start()
    if block:
        # Block the caller until the arbiter exits (signal, Ctrl-C, or
        # the queue draining when stop_when_queue_empty is set).
        with handle_sigterm():
            try:
                self._arbiter.join()
            except KeyboardInterrupt:
                self.stop_workers()
            except AttributeError:
                # Arbiter thread starts and stops immediately when ran with
                # `stop_when_queue_empty` and queue is already empty.
                pass
python
def start_workers(self, number: int=DEFAULT_WORKER_NUMBER, queue=DEFAULT_QUEUE, block=True, stop_when_queue_empty=False): """Start the worker threads. :arg number: number of worker threads to launch :arg queue: name of the queue to consume, see :doc:`queues` :arg block: whether to block the calling thread until a signal arrives and workers get terminated :arg stop_when_queue_empty: automatically stop the workers when the queue is empty. Useful mostly for one-off scripts and testing. """ if self._arbiter or self._workers: raise RuntimeError('Workers are already running') self._working_queue = queue tasks_names = '\n'.join( [' - ' + task.name for task in self._tasks.tasks.values() if task.queue == self._working_queue] ) logger.info('Starting %d workers on queue "%s" with tasks:\n%s', number, self._working_queue, tasks_names) # Start the broker self._broker.start() # Start workers self._workers = Workers( num_workers=number, namespace=self.namespace, ) # Start the result notifier self._result_notifier = threading.Thread( target=run_forever, args=(self._result_notifier_func, self._must_stop, logger), name='{}-result-notifier'.format(self.namespace) ) self._result_notifier.start() # Start the arbiter self._arbiter = threading.Thread( target=run_forever, args=(self._arbiter_func, self._must_stop, logger, stop_when_queue_empty), name='{}-arbiter'.format(self.namespace) ) self._arbiter.start() if block: with handle_sigterm(): try: self._arbiter.join() except KeyboardInterrupt: self.stop_workers() except AttributeError: # Arbiter thread starts and stops immediately when ran with # `stop_when_queue_empty` and queue is already empty. pass
[ "def", "start_workers", "(", "self", ",", "number", ":", "int", "=", "DEFAULT_WORKER_NUMBER", ",", "queue", "=", "DEFAULT_QUEUE", ",", "block", "=", "True", ",", "stop_when_queue_empty", "=", "False", ")", ":", "if", "self", ".", "_arbiter", "or", "self", ...
Start the worker threads. :arg number: number of worker threads to launch :arg queue: name of the queue to consume, see :doc:`queues` :arg block: whether to block the calling thread until a signal arrives and workers get terminated :arg stop_when_queue_empty: automatically stop the workers when the queue is empty. Useful mostly for one-off scripts and testing.
[ "Start", "the", "worker", "threads", "." ]
0122f916643101eab5cdc1f3da662b9446e372aa
https://github.com/NicolasLM/spinach/blob/0122f916643101eab5cdc1f3da662b9446e372aa/spinach/engine.py#L148-L207
train
50,505
NicolasLM/spinach
spinach/engine.py
Engine.stop_workers
def stop_workers(self, _join_arbiter=True): """Stop the workers and wait for them to terminate.""" # _join_arbiter is used internally when the arbiter is shutting down # the full engine itself. This is because the arbiter thread cannot # join itself. self._must_stop.set() self._workers.stop() self._result_notifier.join() self._broker.stop() if _join_arbiter: self._arbiter.join() self._reset()
python
def stop_workers(self, _join_arbiter=True): """Stop the workers and wait for them to terminate.""" # _join_arbiter is used internally when the arbiter is shutting down # the full engine itself. This is because the arbiter thread cannot # join itself. self._must_stop.set() self._workers.stop() self._result_notifier.join() self._broker.stop() if _join_arbiter: self._arbiter.join() self._reset()
[ "def", "stop_workers", "(", "self", ",", "_join_arbiter", "=", "True", ")", ":", "# _join_arbiter is used internally when the arbiter is shutting down", "# the full engine itself. This is because the arbiter thread cannot", "# join itself.", "self", ".", "_must_stop", ".", "set", ...
Stop the workers and wait for them to terminate.
[ "Stop", "the", "workers", "and", "wait", "for", "them", "to", "terminate", "." ]
0122f916643101eab5cdc1f3da662b9446e372aa
https://github.com/NicolasLM/spinach/blob/0122f916643101eab5cdc1f3da662b9446e372aa/spinach/engine.py#L209-L220
train
50,506
openstack/networking-hyperv
networking_hyperv/neutron/trunk_driver.py
HyperVTrunkDriver.handle_trunks
def handle_trunks(self, trunks, event_type):
    """Trunk data model change from the server."""
    LOG.debug("Trunks event received: %(event_type)s. Trunks: %(trunks)s",
              {'event_type': event_type, 'trunks': trunks})

    if event_type == events.DELETED:
        # The port trunks have been deleted. Remove them from cache.
        for trunk in trunks:
            self._trunks.pop(trunk.id, None)
        return

    # Any other event: cache the trunks and (re)bind them.
    for trunk in trunks:
        self._trunks[trunk.id] = trunk
        self._setup_trunk(trunk)
python
def handle_trunks(self, trunks, event_type): """Trunk data model change from the server.""" LOG.debug("Trunks event received: %(event_type)s. Trunks: %(trunks)s", {'event_type': event_type, 'trunks': trunks}) if event_type == events.DELETED: # The port trunks have been deleted. Remove them from cache. for trunk in trunks: self._trunks.pop(trunk.id, None) else: for trunk in trunks: self._trunks[trunk.id] = trunk self._setup_trunk(trunk)
[ "def", "handle_trunks", "(", "self", ",", "trunks", ",", "event_type", ")", ":", "LOG", ".", "debug", "(", "\"Trunks event received: %(event_type)s. Trunks: %(trunks)s\"", ",", "{", "'event_type'", ":", "event_type", ",", "'trunks'", ":", "trunks", "}", ")", "if",...
Trunk data model change from the server.
[ "Trunk", "data", "model", "change", "from", "the", "server", "." ]
7a89306ab0586c95b99debb44d898f70834508b9
https://github.com/openstack/networking-hyperv/blob/7a89306ab0586c95b99debb44d898f70834508b9/networking_hyperv/neutron/trunk_driver.py#L46-L59
train
50,507
openstack/networking-hyperv
networking_hyperv/neutron/trunk_driver.py
HyperVTrunkDriver.handle_subports
def handle_subports(self, subports, event_type):
    """Subport data model change from the server."""
    LOG.debug("Subports event received: %(event_type)s. "
              "Subports: %(subports)s",
              {'event_type': event_type, 'subports': subports})

    # Bring the cached trunks in line with the event.
    for subport in subports:
        cached = self._trunks.get(subport['trunk_id'])
        if not cached:
            continue
        if event_type == events.CREATED:
            cached.sub_ports.append(subport)
        elif event_type == events.DELETED and subport in cached.sub_ports:
            cached.sub_ports.remove(subport)

    # Rebind every trunk touched by this event.
    for trunk_id in {s['trunk_id'] for s in subports}:
        trunk = self._trunks.get(trunk_id)
        if trunk:
            self._setup_trunk(trunk)
python
def handle_subports(self, subports, event_type): """Subport data model change from the server.""" LOG.debug("Subports event received: %(event_type)s. " "Subports: %(subports)s", {'event_type': event_type, 'subports': subports}) # update the cache. if event_type == events.CREATED: for subport in subports: trunk = self._trunks.get(subport['trunk_id']) if trunk: trunk.sub_ports.append(subport) elif event_type == events.DELETED: for subport in subports: trunk = self._trunks.get(subport['trunk_id']) if trunk and subport in trunk.sub_ports: trunk.sub_ports.remove(subport) # update the bound trunks. affected_trunk_ids = set([s['trunk_id'] for s in subports]) for trunk_id in affected_trunk_ids: trunk = self._trunks.get(trunk_id) if trunk: self._setup_trunk(trunk)
[ "def", "handle_subports", "(", "self", ",", "subports", ",", "event_type", ")", ":", "LOG", ".", "debug", "(", "\"Subports event received: %(event_type)s. \"", "\"Subports: %(subports)s\"", ",", "{", "'event_type'", ":", "event_type", ",", "'subports'", ":", "subports...
Subport data model change from the server.
[ "Subport", "data", "model", "change", "from", "the", "server", "." ]
7a89306ab0586c95b99debb44d898f70834508b9
https://github.com/openstack/networking-hyperv/blob/7a89306ab0586c95b99debb44d898f70834508b9/networking_hyperv/neutron/trunk_driver.py#L61-L85
train
50,508
openstack/networking-hyperv
networking_hyperv/neutron/trunk_driver.py
HyperVTrunkDriver._setup_trunk
def _setup_trunk(self, trunk, vlan_id=None):
    """Sets up VLAN trunk and updates the trunk status."""
    LOG.info('Binding trunk port: %s.', trunk)
    try:
        # bind sub_ports to host.
        self._trunk_rpc.update_subport_bindings(
            self._context, trunk.sub_ports)
        vlan_trunk = [port.segmentation_id for port in trunk.sub_ports]
        self._set_port_vlan(trunk.port_id, vlan_id, vlan_trunk)
        self._trunk_rpc.update_trunk_status(
            self._context, trunk.id, t_const.ACTIVE_STATUS)
    except Exception:
        # something broke
        LOG.exception("Failure setting up subports for %s", trunk.port_id)
        self._trunk_rpc.update_trunk_status(
            self._context, trunk.id, t_const.DEGRADED_STATUS)
python
def _setup_trunk(self, trunk, vlan_id=None): """Sets up VLAN trunk and updates the trunk status.""" LOG.info('Binding trunk port: %s.', trunk) try: # bind sub_ports to host. self._trunk_rpc.update_subport_bindings(self._context, trunk.sub_ports) vlan_trunk = [s.segmentation_id for s in trunk.sub_ports] self._set_port_vlan(trunk.port_id, vlan_id, vlan_trunk) self._trunk_rpc.update_trunk_status(self._context, trunk.id, t_const.ACTIVE_STATUS) except Exception: # something broke LOG.exception("Failure setting up subports for %s", trunk.port_id) self._trunk_rpc.update_trunk_status(self._context, trunk.id, t_const.DEGRADED_STATUS)
[ "def", "_setup_trunk", "(", "self", ",", "trunk", ",", "vlan_id", "=", "None", ")", ":", "LOG", ".", "info", "(", "'Binding trunk port: %s.'", ",", "trunk", ")", "try", ":", "# bind sub_ports to host.", "self", ".", "_trunk_rpc", ".", "update_subport_bindings", ...
Sets up VLAN trunk and updates the trunk status.
[ "Sets", "up", "VLAN", "trunk", "and", "updates", "the", "trunk", "status", "." ]
7a89306ab0586c95b99debb44d898f70834508b9
https://github.com/openstack/networking-hyperv/blob/7a89306ab0586c95b99debb44d898f70834508b9/networking_hyperv/neutron/trunk_driver.py#L115-L133
train
50,509
openstack/networking-hyperv
networking_hyperv/neutron/agent/hnv_metadata_agent.py
main
def main():
    """The entry point for neutron-hnv-metadata-proxy."""
    register_config_opts()
    # Parse CLI/config options and wire up logging before serving.
    common_config.init(sys.argv[1:])
    neutron_config.setup_logging()
    MetadataProxy().run()
python
def main(): """The entry point for neutron-hnv-metadata-proxy.""" register_config_opts() common_config.init(sys.argv[1:]) neutron_config.setup_logging() proxy = MetadataProxy() proxy.run()
[ "def", "main", "(", ")", ":", "register_config_opts", "(", ")", "common_config", ".", "init", "(", "sys", ".", "argv", "[", "1", ":", "]", ")", "neutron_config", ".", "setup_logging", "(", ")", "proxy", "=", "MetadataProxy", "(", ")", "proxy", ".", "ru...
The entry point for neutron-hnv-metadata-proxy.
[ "The", "entry", "point", "for", "neutron", "-", "hnv", "-", "metadata", "-", "proxy", "." ]
7a89306ab0586c95b99debb44d898f70834508b9
https://github.com/openstack/networking-hyperv/blob/7a89306ab0586c95b99debb44d898f70834508b9/networking_hyperv/neutron/agent/hnv_metadata_agent.py#L227-L233
train
50,510
openstack/networking-hyperv
networking_hyperv/neutron/agent/hnv_metadata_agent.py
_MetadataProxyHandler._get_port_profile_id
def _get_port_profile_id(self, request):
    """Get the port profile ID from the request path."""
    # Note(alexcoman): The port profile ID can be found as suffix
    # in request path.
    candidate = request.path.split("/")[-1].strip()
    if not uuidutils.is_uuid_like(candidate):
        LOG.debug("Failed to get the instance id from the request.")
        return None
    LOG.debug("The instance id was found in request path.")
    return candidate
python
def _get_port_profile_id(self, request): """Get the port profile ID from the request path.""" # Note(alexcoman): The port profile ID can be found as suffix # in request path. port_profile_id = request.path.split("/")[-1].strip() if uuidutils.is_uuid_like(port_profile_id): LOG.debug("The instance id was found in request path.") return port_profile_id LOG.debug("Failed to get the instance id from the request.") return None
[ "def", "_get_port_profile_id", "(", "self", ",", "request", ")", ":", "# Note(alexcoman): The port profile ID can be found as suffix", "# in request path.", "port_profile_id", "=", "request", ".", "path", ".", "split", "(", "\"/\"", ")", "[", "-", "1", "]", ".", "st...
Get the port profile ID from the request path.
[ "Get", "the", "port", "profile", "ID", "from", "the", "request", "path", "." ]
7a89306ab0586c95b99debb44d898f70834508b9
https://github.com/openstack/networking-hyperv/blob/7a89306ab0586c95b99debb44d898f70834508b9/networking_hyperv/neutron/agent/hnv_metadata_agent.py#L63-L74
train
50,511
openstack/networking-hyperv
networking_hyperv/neutron/agent/hnv_metadata_agent.py
MetadataProxy._work
def _work(self):
    """Start the neutron-hnv-metadata-proxy agent."""
    # Serve the metadata proxy WSGI application until shutdown.
    server = wsgi.Server(
        name=self._AGENT_BINARY,
        num_threads=CONF.AGENT.worker_count)
    handler = _MetadataProxyHandler()
    server.start(application=handler,
                 port=CONF.bind_port,
                 host=CONF.bind_host)
    server.wait()
python
def _work(self): """Start the neutron-hnv-metadata-proxy agent.""" server = wsgi.Server( name=self._AGENT_BINARY, num_threads=CONF.AGENT.worker_count) server.start( application=_MetadataProxyHandler(), port=CONF.bind_port, host=CONF.bind_host) server.wait()
[ "def", "_work", "(", "self", ")", ":", "server", "=", "wsgi", ".", "Server", "(", "name", "=", "self", ".", "_AGENT_BINARY", ",", "num_threads", "=", "CONF", ".", "AGENT", ".", "worker_count", ")", "server", ".", "start", "(", "application", "=", "_Met...
Start the neutron-hnv-metadata-proxy agent.
[ "Start", "the", "neutron", "-", "hnv", "-", "metadata", "-", "proxy", "agent", "." ]
7a89306ab0586c95b99debb44d898f70834508b9
https://github.com/openstack/networking-hyperv/blob/7a89306ab0586c95b99debb44d898f70834508b9/networking_hyperv/neutron/agent/hnv_metadata_agent.py#L202-L211
train
50,512
project-rig/rig
rig/place_and_route/route/utils.py
links_between
def links_between(a, b, machine):
    """Get the set of working links connecting chips a and b.

    Parameters
    ----------
    a : (x, y)
    b : (x, y)
    machine : :py:class:`~rig.place_and_route.Machine`

    Returns
    -------
    set([:py:class:`~rig.links.Links`, ...])
    """
    ax, ay = a
    bx, by = b
    connecting = set()
    for link in Links:
        dx, dy = link.to_vector()
        # A link counts only if it lands on b (with toroidal wrap-around)
        # and is marked working in the machine description.
        if ((ax + dx) % machine.width == bx and
                (ay + dy) % machine.height == by and
                (ax, ay, link) in machine):
            connecting.add(link)
    return connecting
python
def links_between(a, b, machine): """Get the set of working links connecting chips a and b. Parameters ---------- a : (x, y) b : (x, y) machine : :py:class:`~rig.place_and_route.Machine` Returns ------- set([:py:class:`~rig.links.Links`, ...]) """ ax, ay = a bx, by = b return set(link for link, (dx, dy) in ((l, l.to_vector()) for l in Links) if (ax + dx) % machine.width == bx and (ay + dy) % machine.height == by and (ax, ay, link) in machine)
[ "def", "links_between", "(", "a", ",", "b", ",", "machine", ")", ":", "ax", ",", "ay", "=", "a", "bx", ",", "by", "=", "b", "return", "set", "(", "link", "for", "link", ",", "(", "dx", ",", "dy", ")", "in", "(", "(", "l", ",", "l", ".", "...
Get the set of working links connecting chips a and b. Parameters ---------- a : (x, y) b : (x, y) machine : :py:class:`~rig.place_and_route.Machine` Returns ------- set([:py:class:`~rig.links.Links`, ...])
[ "Get", "the", "set", "of", "working", "links", "connecting", "chips", "a", "and", "b", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/place_and_route/route/utils.py#L76-L94
train
50,513
Metatab/metapack
metapack/package/s3.py
set_s3_profile
def set_s3_profile(profile_name):
    """Load the credentials for an s3 profile into environmental variables.

    :param profile_name: name of the AWS profile to read from the local
        boto3/AWS configuration.
    """
    import os

    # Fetch the credentials once instead of resolving them twice.
    credentials = boto3.Session(profile_name=profile_name).get_credentials()
    os.environ['AWS_ACCESS_KEY_ID'] = credentials.access_key
    os.environ['AWS_SECRET_ACCESS_KEY'] = credentials.secret_key
python
def set_s3_profile(profile_name): """Load the credentials for an s3 profile into environmental variables""" import os session = boto3.Session(profile_name=profile_name) os.environ['AWS_ACCESS_KEY_ID'] = session.get_credentials().access_key os.environ['AWS_SECRET_ACCESS_KEY'] = session.get_credentials().secret_key
[ "def", "set_s3_profile", "(", "profile_name", ")", ":", "import", "os", "session", "=", "boto3", ".", "Session", "(", "profile_name", "=", "profile_name", ")", "os", ".", "environ", "[", "'AWS_ACCESS_KEY_ID'", "]", "=", "session", ".", "get_credentials", "(", ...
Load the credentials for an s3 profile into environmental variables
[ "Load", "the", "credentials", "for", "an", "s3", "profile", "into", "environmental", "variables" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/package/s3.py#L158-L165
train
50,514
NicolasLM/spinach
spinach/signals.py
SafeNamedSignal.send
def send(self, *sender, **kwargs):
    """Emit this signal on behalf of `sender`, passing on kwargs.

    This is an extension of `Signal.send` that changes one thing:
    Exceptions raised in calling the receiver are logged but do not fail
    the dispatch of the signal to the remaining receivers.
    """
    if not sender:
        emitter = None
    elif len(sender) > 1:
        raise TypeError('send() accepts only one positional argument, '
                        '%s given' % len(sender))
    else:
        emitter = sender[0]

    if not self.receivers:
        return []

    results = []
    for receiver in self.receivers_for(emitter):
        try:
            results.append((receiver, receiver(emitter, **kwargs)))
        except Exception:
            logger.exception('Error while dispatching signal "{}" '
                             'to receiver'.format(self.name))
    return results
python
def send(self, *sender, **kwargs): """Emit this signal on behalf of `sender`, passing on kwargs. This is an extension of `Signal.send` that changes one thing: Exceptions raised in calling the receiver are logged but do not fail """ if len(sender) == 0: sender = None elif len(sender) > 1: raise TypeError('send() accepts only one positional argument, ' '%s given' % len(sender)) else: sender = sender[0] if not self.receivers: return [] rv = list() for receiver in self.receivers_for(sender): try: rv.append((receiver, receiver(sender, **kwargs))) except Exception: logger.exception('Error while dispatching signal "{}" ' 'to receiver'.format(self.name)) return rv
[ "def", "send", "(", "self", ",", "*", "sender", ",", "*", "*", "kwargs", ")", ":", "if", "len", "(", "sender", ")", "==", "0", ":", "sender", "=", "None", "elif", "len", "(", "sender", ")", ">", "1", ":", "raise", "TypeError", "(", "'send() accep...
Emit this signal on behalf of `sender`, passing on kwargs. This is an extension of `Signal.send` that changes one thing: Exceptions raised in calling the receiver are logged but do not fail
[ "Emit", "this", "signal", "on", "behalf", "of", "sender", "passing", "on", "kwargs", "." ]
0122f916643101eab5cdc1f3da662b9446e372aa
https://github.com/NicolasLM/spinach/blob/0122f916643101eab5cdc1f3da662b9446e372aa/spinach/signals.py#L16-L40
train
50,515
Metatab/metapack
metapack/jupyter/exporters.py
DocumentationExporter.update_metatab
def update_metatab(self, doc, resources): """Add documentation entries for resources""" if not 'Documentation' in doc: doc.new_section("Documentation") ds = doc['Documentation'] if not 'Name' in ds.args: ds.add_arg('Name', prepend=True) # This is the main output from the HTML exporter, not a resource. ds.new_term('Root.Documentation', 'docs/notebook.html', name="notebook.html", title='Jupyter Notebook (HTML)') for name, data in resources.get('outputs', {}).items(): if name == 'documentation.html': ds.new_term('Root.Documentation', 'docs/' + name, name=name, title='Primary Documentation (HTML)') elif name == 'html_basic_body.html': pass elif name.endswith('.html'): ds.new_term('Root.Documentation', 'docs/' + name, name=name, title='Documentation (HTML)') elif name.endswith('.md'): ds.new_term('Root.Documentation', 'docs/' + name, name=name, title='Documentation (Markdown)') elif name.endswith('.pdf'): ds.new_term('Root.Documentation', 'docs/' + name, name=name, title='Documentation (PDF)') elif name.endswith('.png'): ds.new_term('Root.Image', 'docs/' + name, name=name, title='Image for HTML Documentation') else: pass
python
def update_metatab(self, doc, resources): """Add documentation entries for resources""" if not 'Documentation' in doc: doc.new_section("Documentation") ds = doc['Documentation'] if not 'Name' in ds.args: ds.add_arg('Name', prepend=True) # This is the main output from the HTML exporter, not a resource. ds.new_term('Root.Documentation', 'docs/notebook.html', name="notebook.html", title='Jupyter Notebook (HTML)') for name, data in resources.get('outputs', {}).items(): if name == 'documentation.html': ds.new_term('Root.Documentation', 'docs/' + name, name=name, title='Primary Documentation (HTML)') elif name == 'html_basic_body.html': pass elif name.endswith('.html'): ds.new_term('Root.Documentation', 'docs/' + name, name=name, title='Documentation (HTML)') elif name.endswith('.md'): ds.new_term('Root.Documentation', 'docs/' + name, name=name, title='Documentation (Markdown)') elif name.endswith('.pdf'): ds.new_term('Root.Documentation', 'docs/' + name, name=name, title='Documentation (PDF)') elif name.endswith('.png'): ds.new_term('Root.Image', 'docs/' + name, name=name, title='Image for HTML Documentation') else: pass
[ "def", "update_metatab", "(", "self", ",", "doc", ",", "resources", ")", ":", "if", "not", "'Documentation'", "in", "doc", ":", "doc", ".", "new_section", "(", "\"Documentation\"", ")", "ds", "=", "doc", "[", "'Documentation'", "]", "if", "not", "'Name'", ...
Add documentation entries for resources
[ "Add", "documentation", "entries", "for", "resources" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/jupyter/exporters.py#L187-L218
train
50,516
Metatab/metapack
metapack/jupyter/exporters.py
NotebookExecutor.get_package_dir_name
def get_package_dir_name(self, nb): """This is the name of the package we will be creating. """ package_dir = self.package_dir if not package_dir: package_dir = getcwd() package_name = self.package_name if not package_name: doc = ExtractInlineMetatabDoc(package_url="metapack+file:" + package_dir).run(nb) if not doc: raise NotebookError("Notebook does not have an inline metatab doc") t = doc.find_first('Root.Name', section='Root') if not t: raise NotebookError("Inline Metatab doc doesnt have a Root.Name term") package_name = doc.as_version(None) return package_dir, package_name
python
def get_package_dir_name(self, nb): """This is the name of the package we will be creating. """ package_dir = self.package_dir if not package_dir: package_dir = getcwd() package_name = self.package_name if not package_name: doc = ExtractInlineMetatabDoc(package_url="metapack+file:" + package_dir).run(nb) if not doc: raise NotebookError("Notebook does not have an inline metatab doc") t = doc.find_first('Root.Name', section='Root') if not t: raise NotebookError("Inline Metatab doc doesnt have a Root.Name term") package_name = doc.as_version(None) return package_dir, package_name
[ "def", "get_package_dir_name", "(", "self", ",", "nb", ")", ":", "package_dir", "=", "self", ".", "package_dir", "if", "not", "package_dir", ":", "package_dir", "=", "getcwd", "(", ")", "package_name", "=", "self", ".", "package_name", "if", "not", "package_...
This is the name of the package we will be creating.
[ "This", "is", "the", "name", "of", "the", "package", "we", "will", "be", "creating", "." ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/jupyter/exporters.py#L248-L271
train
50,517
Metatab/metapack
metapack/jupyter/exporters.py
NotebookExecutor.get_output_dir
def get_output_dir(self, nb): """Open a notebook and determine the output directory from the name""" self.package_dir, self.package_name = self.get_package_dir_name(nb) return join(self.package_dir, self.package_name)
python
def get_output_dir(self, nb): """Open a notebook and determine the output directory from the name""" self.package_dir, self.package_name = self.get_package_dir_name(nb) return join(self.package_dir, self.package_name)
[ "def", "get_output_dir", "(", "self", ",", "nb", ")", ":", "self", ".", "package_dir", ",", "self", ".", "package_name", "=", "self", ".", "get_package_dir_name", "(", "nb", ")", "return", "join", "(", "self", ".", "package_dir", ",", "self", ".", "packa...
Open a notebook and determine the output directory from the name
[ "Open", "a", "notebook", "and", "determine", "the", "output", "directory", "from", "the", "name" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/jupyter/exporters.py#L273-L277
train
50,518
Metatab/metapack
metapack/jupyter/exporters.py
NotebookExecutor.extract_terms
def extract_terms(self, nb): """Extract some term values, usually set with tags or metadata""" emt = ExtractMetatabTerms() emt.preprocess(nb, {}) return emt.terms
python
def extract_terms(self, nb): """Extract some term values, usually set with tags or metadata""" emt = ExtractMetatabTerms() emt.preprocess(nb, {}) return emt.terms
[ "def", "extract_terms", "(", "self", ",", "nb", ")", ":", "emt", "=", "ExtractMetatabTerms", "(", ")", "emt", ".", "preprocess", "(", "nb", ",", "{", "}", ")", "return", "emt", ".", "terms" ]
Extract some term values, usually set with tags or metadata
[ "Extract", "some", "term", "values", "usually", "set", "with", "tags", "or", "metadata" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/jupyter/exporters.py#L279-L284
train
50,519
Metatab/metapack
metapack/jupyter/exporters.py
NotebookExecutor.from_notebook_node
def from_notebook_node(self, nb, resources=None, **kw): """Create a Metatab package from a notebook node """ nb_copy = copy.deepcopy(nb) # The the package name and directory, either from the inlined Metatab doc, # or from the config try: self.output_dir = self.get_output_dir(nb) except NotebookError as e: # Notebook probably lacks a metatab doc. self.log.fatal(e) sys.exit(1) assert self.output_dir resources = self._init_resources(resources) resources['outputs'] = {} if 'language' in nb['metadata']: resources['language'] = nb['metadata']['language'].lower() # Do any other configured preprocessing nb_copy, resources = self._preprocess(nb_copy, resources) # The Notebook can set some terms with tags self.extra_terms = self.extract_terms(nb_copy) # Clear the output before executing self.clear_output(nb_copy) nb_copy, resources = self.exec_notebook(nb_copy, resources, self.notebook_dir) eld = ExtractLibDirs() eld.preprocess(nb_copy, {}) self.lib_dirs = eld.lib_dirs efm = ExtractFinalMetatabDoc() efm.preprocess(nb_copy, {}) if not efm.doc: raise MetapackError("No metatab doc") self.doc = efm.doc for section, term, value in self.extra_terms: self.doc[section].get_or_new_term(term, value) nb, _ = RemoveMetatab().preprocess(nb, {}) resources['outputs']['notebooks/{}.ipynb'.format(self.package_name)] = nbformat.writes(nb).encode('utf-8') return efm.doc.as_csv(), resources
python
def from_notebook_node(self, nb, resources=None, **kw): """Create a Metatab package from a notebook node """ nb_copy = copy.deepcopy(nb) # The the package name and directory, either from the inlined Metatab doc, # or from the config try: self.output_dir = self.get_output_dir(nb) except NotebookError as e: # Notebook probably lacks a metatab doc. self.log.fatal(e) sys.exit(1) assert self.output_dir resources = self._init_resources(resources) resources['outputs'] = {} if 'language' in nb['metadata']: resources['language'] = nb['metadata']['language'].lower() # Do any other configured preprocessing nb_copy, resources = self._preprocess(nb_copy, resources) # The Notebook can set some terms with tags self.extra_terms = self.extract_terms(nb_copy) # Clear the output before executing self.clear_output(nb_copy) nb_copy, resources = self.exec_notebook(nb_copy, resources, self.notebook_dir) eld = ExtractLibDirs() eld.preprocess(nb_copy, {}) self.lib_dirs = eld.lib_dirs efm = ExtractFinalMetatabDoc() efm.preprocess(nb_copy, {}) if not efm.doc: raise MetapackError("No metatab doc") self.doc = efm.doc for section, term, value in self.extra_terms: self.doc[section].get_or_new_term(term, value) nb, _ = RemoveMetatab().preprocess(nb, {}) resources['outputs']['notebooks/{}.ipynb'.format(self.package_name)] = nbformat.writes(nb).encode('utf-8') return efm.doc.as_csv(), resources
[ "def", "from_notebook_node", "(", "self", ",", "nb", ",", "resources", "=", "None", ",", "*", "*", "kw", ")", ":", "nb_copy", "=", "copy", ".", "deepcopy", "(", "nb", ")", "# The the package name and directory, either from the inlined Metatab doc,", "# or from the c...
Create a Metatab package from a notebook node
[ "Create", "a", "Metatab", "package", "from", "a", "notebook", "node" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/jupyter/exporters.py#L292-L347
train
50,520
Metatab/metapack
metapack/jupyter/exporters.py
HugoOutputExtractor.preprocess_cell
def preprocess_cell(self, cell, resources, cell_index): """Also extracts attachments""" from nbformat.notebooknode import NotebookNode attach_names = [] # Just move the attachment into an output for k, attach in cell.get('attachments', {}).items(): for mime_type in self.extract_output_types: if mime_type in attach: if not 'outputs' in cell: cell['outputs'] = [] o = NotebookNode({ 'data': NotebookNode({mime_type: attach[mime_type]}), 'metadata': NotebookNode({ 'filenames': {mime_type: k} # Will get re-written }), 'output_type': 'display_data' }) cell['outputs'].append(o) attach_names.append((mime_type, k)) nb, resources = super().preprocess_cell(cell, resources, cell_index) output_names = list(resources.get('outputs', {}).keys()) if attach_names: # We're going to assume that attachments are only on Markdown cells, and Markdown cells # can't generate output, so all of the outputs wee added. # reverse + zip matches the last len(attach_names) elements from output_names for output_name, (mimetype, an) in zip(reversed(output_names), reversed(attach_names)): # We'll post process to set the final output directory cell.source = re.sub('\(attachment:{}\)'.format(an), '(__IMGDIR__/{})'.format(output_name), cell.source) return nb, resources
python
def preprocess_cell(self, cell, resources, cell_index): """Also extracts attachments""" from nbformat.notebooknode import NotebookNode attach_names = [] # Just move the attachment into an output for k, attach in cell.get('attachments', {}).items(): for mime_type in self.extract_output_types: if mime_type in attach: if not 'outputs' in cell: cell['outputs'] = [] o = NotebookNode({ 'data': NotebookNode({mime_type: attach[mime_type]}), 'metadata': NotebookNode({ 'filenames': {mime_type: k} # Will get re-written }), 'output_type': 'display_data' }) cell['outputs'].append(o) attach_names.append((mime_type, k)) nb, resources = super().preprocess_cell(cell, resources, cell_index) output_names = list(resources.get('outputs', {}).keys()) if attach_names: # We're going to assume that attachments are only on Markdown cells, and Markdown cells # can't generate output, so all of the outputs wee added. # reverse + zip matches the last len(attach_names) elements from output_names for output_name, (mimetype, an) in zip(reversed(output_names), reversed(attach_names)): # We'll post process to set the final output directory cell.source = re.sub('\(attachment:{}\)'.format(an), '(__IMGDIR__/{})'.format(output_name), cell.source) return nb, resources
[ "def", "preprocess_cell", "(", "self", ",", "cell", ",", "resources", ",", "cell_index", ")", ":", "from", "nbformat", ".", "notebooknode", "import", "NotebookNode", "attach_names", "=", "[", "]", "# Just move the attachment into an output", "for", "k", ",", "atta...
Also extracts attachments
[ "Also", "extracts", "attachments" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/jupyter/exporters.py#L385-L427
train
50,521
project-rig/rig
rig/place_and_route/place/hilbert.py
hilbert
def hilbert(level, angle=1, s=None): """Generator of points along a 2D Hilbert curve. This implements the L-system as described on `http://en.wikipedia.org/wiki/Hilbert_curve`. Parameters ---------- level : int Number of levels of recursion to use in generating the curve. The resulting curve will be `(2**level)-1` wide/tall. angle : int **For internal use only.** `1` if this is the 'positive' expansion of the grammar and `-1` for the 'negative' expansion. s : HilbertState **For internal use only.** The current state of the system. """ # An internal (mutable) state object (note: used in place of a closure with # nonlocal variables for Python 2 support). class HilbertState(object): def __init__(self, x=0, y=0, dx=1, dy=0): self.x, self.y, self.dx, self.dy = x, y, dx, dy # Create state object first time we're called while also yielding first # position if s is None: s = HilbertState() yield s.x, s.y if level <= 0: return # Turn left s.dx, s.dy = s.dy*-angle, s.dx*angle # Recurse negative for s.x, s.y in hilbert(level - 1, -angle, s): yield s.x, s.y # Move forward s.x, s.y = s.x + s.dx, s.y + s.dy yield s.x, s.y # Turn right s.dx, s.dy = s.dy*angle, s.dx*-angle # Recurse positive for s.x, s.y in hilbert(level - 1, angle, s): yield s.x, s.y # Move forward s.x, s.y = s.x + s.dx, s.y + s.dy yield s.x, s.y # Recurse positive for s.x, s.y in hilbert(level - 1, angle, s): yield s.x, s.y # Turn right s.dx, s.dy = s.dy*angle, s.dx*-angle # Move forward s.x, s.y = s.x + s.dx, s.y + s.dy yield s.x, s.y # Recurse negative for s.x, s.y in hilbert(level - 1, -angle, s): yield s.x, s.y # Turn left s.dx, s.dy = s.dy*-angle, s.dx*angle
python
def hilbert(level, angle=1, s=None): """Generator of points along a 2D Hilbert curve. This implements the L-system as described on `http://en.wikipedia.org/wiki/Hilbert_curve`. Parameters ---------- level : int Number of levels of recursion to use in generating the curve. The resulting curve will be `(2**level)-1` wide/tall. angle : int **For internal use only.** `1` if this is the 'positive' expansion of the grammar and `-1` for the 'negative' expansion. s : HilbertState **For internal use only.** The current state of the system. """ # An internal (mutable) state object (note: used in place of a closure with # nonlocal variables for Python 2 support). class HilbertState(object): def __init__(self, x=0, y=0, dx=1, dy=0): self.x, self.y, self.dx, self.dy = x, y, dx, dy # Create state object first time we're called while also yielding first # position if s is None: s = HilbertState() yield s.x, s.y if level <= 0: return # Turn left s.dx, s.dy = s.dy*-angle, s.dx*angle # Recurse negative for s.x, s.y in hilbert(level - 1, -angle, s): yield s.x, s.y # Move forward s.x, s.y = s.x + s.dx, s.y + s.dy yield s.x, s.y # Turn right s.dx, s.dy = s.dy*angle, s.dx*-angle # Recurse positive for s.x, s.y in hilbert(level - 1, angle, s): yield s.x, s.y # Move forward s.x, s.y = s.x + s.dx, s.y + s.dy yield s.x, s.y # Recurse positive for s.x, s.y in hilbert(level - 1, angle, s): yield s.x, s.y # Turn right s.dx, s.dy = s.dy*angle, s.dx*-angle # Move forward s.x, s.y = s.x + s.dx, s.y + s.dy yield s.x, s.y # Recurse negative for s.x, s.y in hilbert(level - 1, -angle, s): yield s.x, s.y # Turn left s.dx, s.dy = s.dy*-angle, s.dx*angle
[ "def", "hilbert", "(", "level", ",", "angle", "=", "1", ",", "s", "=", "None", ")", ":", "# An internal (mutable) state object (note: used in place of a closure with", "# nonlocal variables for Python 2 support).", "class", "HilbertState", "(", "object", ")", ":", "def", ...
Generator of points along a 2D Hilbert curve. This implements the L-system as described on `http://en.wikipedia.org/wiki/Hilbert_curve`. Parameters ---------- level : int Number of levels of recursion to use in generating the curve. The resulting curve will be `(2**level)-1` wide/tall. angle : int **For internal use only.** `1` if this is the 'positive' expansion of the grammar and `-1` for the 'negative' expansion. s : HilbertState **For internal use only.** The current state of the system.
[ "Generator", "of", "points", "along", "a", "2D", "Hilbert", "curve", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/place_and_route/place/hilbert.py#L10-L80
train
50,522
project-rig/rig
rig/place_and_route/place/hilbert.py
hilbert_chip_order
def hilbert_chip_order(machine): """A generator which iterates over a set of chips in a machine in a hilbert path. For use as a chip ordering for the sequential placer. """ max_dimen = max(machine.width, machine.height) hilbert_levels = int(ceil(log(max_dimen, 2.0))) if max_dimen >= 1 else 0 return hilbert(hilbert_levels)
python
def hilbert_chip_order(machine): """A generator which iterates over a set of chips in a machine in a hilbert path. For use as a chip ordering for the sequential placer. """ max_dimen = max(machine.width, machine.height) hilbert_levels = int(ceil(log(max_dimen, 2.0))) if max_dimen >= 1 else 0 return hilbert(hilbert_levels)
[ "def", "hilbert_chip_order", "(", "machine", ")", ":", "max_dimen", "=", "max", "(", "machine", ".", "width", ",", "machine", ".", "height", ")", "hilbert_levels", "=", "int", "(", "ceil", "(", "log", "(", "max_dimen", ",", "2.0", ")", ")", ")", "if", ...
A generator which iterates over a set of chips in a machine in a hilbert path. For use as a chip ordering for the sequential placer.
[ "A", "generator", "which", "iterates", "over", "a", "set", "of", "chips", "in", "a", "machine", "in", "a", "hilbert", "path", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/place_and_route/place/hilbert.py#L83-L91
train
50,523
project-rig/rig
rig/place_and_route/place/hilbert.py
place
def place(vertices_resources, nets, machine, constraints, breadth_first=True): """Places vertices in breadth-first order along a hilbert-curve path through the chips in the machine. This is a thin wrapper around the :py:func:`sequential <rig.place_and_route.place.sequential.place>` placement algorithm which optionally uses the :py:func:`breadth_first_vertex_order` vertex ordering (if the breadth_first argument is True, the default) and :py:func:`hilbert_chip_order` for chip ordering. Parameters ---------- breadth_first : bool Should vertices be placed in breadth first order rather than the iteration order of vertices_resources. True by default. """ return sequential_place(vertices_resources, nets, machine, constraints, (None if not breadth_first else breadth_first_vertex_order(vertices_resources, nets)), hilbert_chip_order(machine))
python
def place(vertices_resources, nets, machine, constraints, breadth_first=True): """Places vertices in breadth-first order along a hilbert-curve path through the chips in the machine. This is a thin wrapper around the :py:func:`sequential <rig.place_and_route.place.sequential.place>` placement algorithm which optionally uses the :py:func:`breadth_first_vertex_order` vertex ordering (if the breadth_first argument is True, the default) and :py:func:`hilbert_chip_order` for chip ordering. Parameters ---------- breadth_first : bool Should vertices be placed in breadth first order rather than the iteration order of vertices_resources. True by default. """ return sequential_place(vertices_resources, nets, machine, constraints, (None if not breadth_first else breadth_first_vertex_order(vertices_resources, nets)), hilbert_chip_order(machine))
[ "def", "place", "(", "vertices_resources", ",", "nets", ",", "machine", ",", "constraints", ",", "breadth_first", "=", "True", ")", ":", "return", "sequential_place", "(", "vertices_resources", ",", "nets", ",", "machine", ",", "constraints", ",", "(", "None",...
Places vertices in breadth-first order along a hilbert-curve path through the chips in the machine. This is a thin wrapper around the :py:func:`sequential <rig.place_and_route.place.sequential.place>` placement algorithm which optionally uses the :py:func:`breadth_first_vertex_order` vertex ordering (if the breadth_first argument is True, the default) and :py:func:`hilbert_chip_order` for chip ordering. Parameters ---------- breadth_first : bool Should vertices be placed in breadth first order rather than the iteration order of vertices_resources. True by default.
[ "Places", "vertices", "in", "breadth", "-", "first", "order", "along", "a", "hilbert", "-", "curve", "path", "through", "the", "chips", "in", "the", "machine", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/place_and_route/place/hilbert.py#L94-L115
train
50,524
project-rig/rig
rig/routing_table/minimise.py
minimise_tables
def minimise_tables(routing_tables, target_lengths, methods=(remove_default_entries, ordered_covering)): """Utility function which attempts to minimises routing tables for multiple chips. For each routing table supplied, this function will attempt to use the minimisation algorithms given (or some sensible default algorithms), trying each sequentially until a target number of routing entries has been reached. Parameters ---------- routing_tables : {(x, y): [\ :py:class:`~rig.routing_table.RoutingTableEntry`, ...], ...} Dictionary mapping chip co-ordinates to the routing tables associated with that chip. NOTE: This is the data structure as returned by :py:meth:`~rig.routing_table.routing_tree_to_tables`. target_lengths : int or {(x, y): int or None, ...} or None Maximum length of routing tables. If an integer this is assumed to be the maximum length for any table; if a dictionary then it is assumed to be a mapping from co-ordinate to maximum length (or None); if None then tables will be minimised as far as possible. methods : Each method is tried in the order presented and the first to meet the required target length for a given chip is used. Consequently less computationally costly algorithms should be nearer the start of the list. The defaults will try to remove default routes (:py:meth:`rig.routing_table.remove_default_routes.minimise`) and then fall back on the ordered covering algorithm (:py:meth:`rig.routing_table.ordered_covering.minimise`). Returns ------- {(x, y): [:py:class:`~rig.routing_table.RoutingTableEntry`, ...], ...} Minimised routing tables, guaranteed to be at least as small as the table sizes specified by `target_lengths`. Raises ------ MinimisationFailedError If no method can sufficiently minimise a table. 
""" # Coerce the target lengths into the correct forms if not isinstance(target_lengths, dict): lengths = collections.defaultdict(lambda: target_lengths) else: lengths = target_lengths # Minimise the routing tables new_tables = dict() for chip, table in iteritems(routing_tables): # Try to minimise the table try: new_table = minimise_table(table, lengths[chip], methods) except MinimisationFailedError as exc: exc.chip = chip raise # Store the table if it isn't empty if new_table: new_tables[chip] = new_table return new_tables
python
def minimise_tables(routing_tables, target_lengths, methods=(remove_default_entries, ordered_covering)): """Utility function which attempts to minimises routing tables for multiple chips. For each routing table supplied, this function will attempt to use the minimisation algorithms given (or some sensible default algorithms), trying each sequentially until a target number of routing entries has been reached. Parameters ---------- routing_tables : {(x, y): [\ :py:class:`~rig.routing_table.RoutingTableEntry`, ...], ...} Dictionary mapping chip co-ordinates to the routing tables associated with that chip. NOTE: This is the data structure as returned by :py:meth:`~rig.routing_table.routing_tree_to_tables`. target_lengths : int or {(x, y): int or None, ...} or None Maximum length of routing tables. If an integer this is assumed to be the maximum length for any table; if a dictionary then it is assumed to be a mapping from co-ordinate to maximum length (or None); if None then tables will be minimised as far as possible. methods : Each method is tried in the order presented and the first to meet the required target length for a given chip is used. Consequently less computationally costly algorithms should be nearer the start of the list. The defaults will try to remove default routes (:py:meth:`rig.routing_table.remove_default_routes.minimise`) and then fall back on the ordered covering algorithm (:py:meth:`rig.routing_table.ordered_covering.minimise`). Returns ------- {(x, y): [:py:class:`~rig.routing_table.RoutingTableEntry`, ...], ...} Minimised routing tables, guaranteed to be at least as small as the table sizes specified by `target_lengths`. Raises ------ MinimisationFailedError If no method can sufficiently minimise a table. 
""" # Coerce the target lengths into the correct forms if not isinstance(target_lengths, dict): lengths = collections.defaultdict(lambda: target_lengths) else: lengths = target_lengths # Minimise the routing tables new_tables = dict() for chip, table in iteritems(routing_tables): # Try to minimise the table try: new_table = minimise_table(table, lengths[chip], methods) except MinimisationFailedError as exc: exc.chip = chip raise # Store the table if it isn't empty if new_table: new_tables[chip] = new_table return new_tables
[ "def", "minimise_tables", "(", "routing_tables", ",", "target_lengths", ",", "methods", "=", "(", "remove_default_entries", ",", "ordered_covering", ")", ")", ":", "# Coerce the target lengths into the correct forms", "if", "not", "isinstance", "(", "target_lengths", ",",...
Utility function which attempts to minimises routing tables for multiple chips. For each routing table supplied, this function will attempt to use the minimisation algorithms given (or some sensible default algorithms), trying each sequentially until a target number of routing entries has been reached. Parameters ---------- routing_tables : {(x, y): [\ :py:class:`~rig.routing_table.RoutingTableEntry`, ...], ...} Dictionary mapping chip co-ordinates to the routing tables associated with that chip. NOTE: This is the data structure as returned by :py:meth:`~rig.routing_table.routing_tree_to_tables`. target_lengths : int or {(x, y): int or None, ...} or None Maximum length of routing tables. If an integer this is assumed to be the maximum length for any table; if a dictionary then it is assumed to be a mapping from co-ordinate to maximum length (or None); if None then tables will be minimised as far as possible. methods : Each method is tried in the order presented and the first to meet the required target length for a given chip is used. Consequently less computationally costly algorithms should be nearer the start of the list. The defaults will try to remove default routes (:py:meth:`rig.routing_table.remove_default_routes.minimise`) and then fall back on the ordered covering algorithm (:py:meth:`rig.routing_table.ordered_covering.minimise`). Returns ------- {(x, y): [:py:class:`~rig.routing_table.RoutingTableEntry`, ...], ...} Minimised routing tables, guaranteed to be at least as small as the table sizes specified by `target_lengths`. Raises ------ MinimisationFailedError If no method can sufficiently minimise a table.
[ "Utility", "function", "which", "attempts", "to", "minimises", "routing", "tables", "for", "multiple", "chips", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/routing_table/minimise.py#L9-L71
train
50,525
project-rig/rig
rig/routing_table/minimise.py
minimise_table
def minimise_table(table, target_length, methods=(remove_default_entries, ordered_covering)): """Apply different minimisation algorithms to minimise a single routing table. Parameters ---------- table : [:py:class:`~rig.routing_table.RoutingTableEntry`, ...] Routing table to minimise. NOTE: This is the data structure as returned by :py:meth:`~rig.routing_table.routing_tree_to_tables`. target_length : int or None Maximum length of the routing table. If None then all methods will be tried and the smallest achieved table will be returned. methods : Each method is tried in the order presented and the first to meet the required target length for a given chip is used. Consequently less computationally costly algorithms should be nearer the start of the list. The defaults will try to remove default routes (:py:meth:rig.routing_table.remove_default_routes.minimise) and then fall back on the ordered covering algorithm (:py:meth:rig.routing_table.ordered_covering.minimise). Returns ------- [:py:class:`~rig.routing_table.RoutingTableEntry`, ...] Minimised routing table, guaranteed to be at least as small as `target_length`, or as small as possible if `target_length` is None. Raises ------ MinimisationFailedError If no method can sufficiently minimise the table. """ # Add a final method which checks the size of the table and returns it if # the size is correct. NOTE: This method will avoid running any other # minimisers if the table is already sufficiently small. methods = list(methods) methods.insert(0, _identity) if target_length is not None: best_achieved = len(table) # Try each minimiser in turn until the table is small enough for f in methods: try: # Minimise the table, if this fails a MinimisationFailedError # will be raised and the return will not be executed. 
new_table = f(table, target_length) return new_table except MinimisationFailedError as exc: # Store the best achieved final length if best_achieved is None or exc.final_length < best_achieved: best_achieved = exc.final_length # The table must still be too large raise MinimisationFailedError(target_length, best_achieved) else: # Try all methods and return the smallest table return min((f(table, target_length) for f in methods), key=len)
python
def minimise_table(table, target_length, methods=(remove_default_entries, ordered_covering)): """Apply different minimisation algorithms to minimise a single routing table. Parameters ---------- table : [:py:class:`~rig.routing_table.RoutingTableEntry`, ...] Routing table to minimise. NOTE: This is the data structure as returned by :py:meth:`~rig.routing_table.routing_tree_to_tables`. target_length : int or None Maximum length of the routing table. If None then all methods will be tried and the smallest achieved table will be returned. methods : Each method is tried in the order presented and the first to meet the required target length for a given chip is used. Consequently less computationally costly algorithms should be nearer the start of the list. The defaults will try to remove default routes (:py:meth:rig.routing_table.remove_default_routes.minimise) and then fall back on the ordered covering algorithm (:py:meth:rig.routing_table.ordered_covering.minimise). Returns ------- [:py:class:`~rig.routing_table.RoutingTableEntry`, ...] Minimised routing table, guaranteed to be at least as small as `target_length`, or as small as possible if `target_length` is None. Raises ------ MinimisationFailedError If no method can sufficiently minimise the table. """ # Add a final method which checks the size of the table and returns it if # the size is correct. NOTE: This method will avoid running any other # minimisers if the table is already sufficiently small. methods = list(methods) methods.insert(0, _identity) if target_length is not None: best_achieved = len(table) # Try each minimiser in turn until the table is small enough for f in methods: try: # Minimise the table, if this fails a MinimisationFailedError # will be raised and the return will not be executed. 
new_table = f(table, target_length) return new_table except MinimisationFailedError as exc: # Store the best achieved final length if best_achieved is None or exc.final_length < best_achieved: best_achieved = exc.final_length # The table must still be too large raise MinimisationFailedError(target_length, best_achieved) else: # Try all methods and return the smallest table return min((f(table, target_length) for f in methods), key=len)
[ "def", "minimise_table", "(", "table", ",", "target_length", ",", "methods", "=", "(", "remove_default_entries", ",", "ordered_covering", ")", ")", ":", "# Add a final method which checks the size of the table and returns it if", "# the size is correct. NOTE: This method will avoid...
Apply different minimisation algorithms to minimise a single routing table. Parameters ---------- table : [:py:class:`~rig.routing_table.RoutingTableEntry`, ...] Routing table to minimise. NOTE: This is the data structure as returned by :py:meth:`~rig.routing_table.routing_tree_to_tables`. target_length : int or None Maximum length of the routing table. If None then all methods will be tried and the smallest achieved table will be returned. methods : Each method is tried in the order presented and the first to meet the required target length for a given chip is used. Consequently less computationally costly algorithms should be nearer the start of the list. The defaults will try to remove default routes (:py:meth:rig.routing_table.remove_default_routes.minimise) and then fall back on the ordered covering algorithm (:py:meth:rig.routing_table.ordered_covering.minimise). Returns ------- [:py:class:`~rig.routing_table.RoutingTableEntry`, ...] Minimised routing table, guaranteed to be at least as small as `target_length`, or as small as possible if `target_length` is None. Raises ------ MinimisationFailedError If no method can sufficiently minimise the table.
[ "Apply", "different", "minimisation", "algorithms", "to", "minimise", "a", "single", "routing", "table", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/routing_table/minimise.py#L74-L132
train
50,526
project-rig/rig
rig/routing_table/minimise.py
_identity
def _identity(table, target_length): """Identity minimisation function.""" if target_length is None or len(table) < target_length: return table raise MinimisationFailedError(target_length, len(table))
python
def _identity(table, target_length): """Identity minimisation function.""" if target_length is None or len(table) < target_length: return table raise MinimisationFailedError(target_length, len(table))
[ "def", "_identity", "(", "table", ",", "target_length", ")", ":", "if", "target_length", "is", "None", "or", "len", "(", "table", ")", "<", "target_length", ":", "return", "table", "raise", "MinimisationFailedError", "(", "target_length", ",", "len", "(", "t...
Identity minimisation function.
[ "Identity", "minimisation", "function", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/routing_table/minimise.py#L135-L139
train
50,527
Metatab/metapack
metapack/cli/metakan.py
configure_ckan
def configure_ckan(m): """Load groups and organizations, from a file in Metatab format""" from ckanapi import RemoteCKAN try: doc = MetapackDoc(m.mt_file, cache=m.cache) except (IOError, MetatabError) as e: err("Failed to open metatab '{}': {}".format(m.mt_file, e)) c = RemoteCKAN(m.ckan_url, apikey=m.api_key) groups = { g['name']:g for g in c.action.group_list(all_fields=True) } for g in doc['Groups']: if g.value not in groups: prt('Creating group: ', g.value) c.action.group_create(name=g.value, title=g.get_value('title'), description=g.get_value('description'), id=g.get_value('id'), image_url=g.get_value('image_url')) orgs = {o['name']: o for o in c.action.organization_list(all_fields=True)} for o in doc['Organizations']: if o.value not in orgs: prt('Creating organization: ', o.value) c.action.organization_create(name=o.value, title=o.get_value('title'), description=o.get_value('description'), id=o.get_value('id'), image_url=o.get_value('image_url'))
python
def configure_ckan(m): """Load groups and organizations, from a file in Metatab format""" from ckanapi import RemoteCKAN try: doc = MetapackDoc(m.mt_file, cache=m.cache) except (IOError, MetatabError) as e: err("Failed to open metatab '{}': {}".format(m.mt_file, e)) c = RemoteCKAN(m.ckan_url, apikey=m.api_key) groups = { g['name']:g for g in c.action.group_list(all_fields=True) } for g in doc['Groups']: if g.value not in groups: prt('Creating group: ', g.value) c.action.group_create(name=g.value, title=g.get_value('title'), description=g.get_value('description'), id=g.get_value('id'), image_url=g.get_value('image_url')) orgs = {o['name']: o for o in c.action.organization_list(all_fields=True)} for o in doc['Organizations']: if o.value not in orgs: prt('Creating organization: ', o.value) c.action.organization_create(name=o.value, title=o.get_value('title'), description=o.get_value('description'), id=o.get_value('id'), image_url=o.get_value('image_url'))
[ "def", "configure_ckan", "(", "m", ")", ":", "from", "ckanapi", "import", "RemoteCKAN", "try", ":", "doc", "=", "MetapackDoc", "(", "m", ".", "mt_file", ",", "cache", "=", "m", ".", "cache", ")", "except", "(", "IOError", ",", "MetatabError", ")", "as"...
Load groups and organizations, from a file in Metatab format
[ "Load", "groups", "and", "organizations", "from", "a", "file", "in", "Metatab", "format" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/cli/metakan.py#L353-L386
train
50,528
Metatab/metapack
metapack/cli/metakan.py
dump_ckan
def dump_ckan(m): """Create a groups and organization file""" doc = MetapackDoc(cache=m.cache) doc.new_section('Groups', 'Title Description Id Image_url'.split()) doc.new_section('Organizations', 'Title Description Id Image_url'.split()) c = RemoteCKAN(m.ckan_url, apikey=m.api_key) for g in c.action.group_list(all_fields=True): print(g.keys()) for o in c.action.organization_list(all_fields=True): print(g.keys())
python
def dump_ckan(m): """Create a groups and organization file""" doc = MetapackDoc(cache=m.cache) doc.new_section('Groups', 'Title Description Id Image_url'.split()) doc.new_section('Organizations', 'Title Description Id Image_url'.split()) c = RemoteCKAN(m.ckan_url, apikey=m.api_key) for g in c.action.group_list(all_fields=True): print(g.keys()) for o in c.action.organization_list(all_fields=True): print(g.keys())
[ "def", "dump_ckan", "(", "m", ")", ":", "doc", "=", "MetapackDoc", "(", "cache", "=", "m", ".", "cache", ")", "doc", ".", "new_section", "(", "'Groups'", ",", "'Title Description Id Image_url'", ".", "split", "(", ")", ")", "doc", ".", "new_section", "("...
Create a groups and organization file
[ "Create", "a", "groups", "and", "organization", "file" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/cli/metakan.py#L442-L455
train
50,529
Metatab/metapack
metapack/cli/metakan.py
MetapackCliMemo.update_mt_arg
def update_mt_arg(self, metatabfile): """Return a new memo with a new metatabfile argument""" o = MetapackCliMemo(self.args) o.set_mt_arg(metatabfile) return o
python
def update_mt_arg(self, metatabfile): """Return a new memo with a new metatabfile argument""" o = MetapackCliMemo(self.args) o.set_mt_arg(metatabfile) return o
[ "def", "update_mt_arg", "(", "self", ",", "metatabfile", ")", ":", "o", "=", "MetapackCliMemo", "(", "self", ".", "args", ")", "o", ".", "set_mt_arg", "(", "metatabfile", ")", "return", "o" ]
Return a new memo with a new metatabfile argument
[ "Return", "a", "new", "memo", "with", "a", "new", "metatabfile", "argument" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/cli/metakan.py#L45-L49
train
50,530
Metatab/metapack
metapack/util.py
declaration_path
def declaration_path(name): """Return the path to an included declaration""" from os.path import dirname, join, exists import metatab.declarations from metatab.exc import IncludeError d = dirname(metatab.declarations.__file__) path = join(d, name) if not exists(path): path = join(d, name + '.csv') if not exists(path): raise IncludeError("No local declaration file for name '{}' ".format(name)) return path
python
def declaration_path(name): """Return the path to an included declaration""" from os.path import dirname, join, exists import metatab.declarations from metatab.exc import IncludeError d = dirname(metatab.declarations.__file__) path = join(d, name) if not exists(path): path = join(d, name + '.csv') if not exists(path): raise IncludeError("No local declaration file for name '{}' ".format(name)) return path
[ "def", "declaration_path", "(", "name", ")", ":", "from", "os", ".", "path", "import", "dirname", ",", "join", ",", "exists", "import", "metatab", ".", "declarations", "from", "metatab", ".", "exc", "import", "IncludeError", "d", "=", "dirname", "(", "meta...
Return the path to an included declaration
[ "Return", "the", "path", "to", "an", "included", "declaration" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/util.py#L16-L32
train
50,531
Metatab/metapack
metapack/util.py
flatten
def flatten(d, sep='.'): """Flatten a data structure into tuples""" def _flatten(e, parent_key='', sep='.'): import collections prefix = parent_key + sep if parent_key else '' if isinstance(e, collections.MutableMapping): return tuple((prefix + k2, v2) for k, v in e.items() for k2, v2 in _flatten(v, k, sep)) elif isinstance(e, collections.MutableSequence): return tuple((prefix + k2, v2) for i, v in enumerate(e) for k2, v2 in _flatten(v, str(i), sep)) else: return (parent_key, (e,)), return tuple((k, v[0]) for k, v in _flatten(d, '', sep))
python
def flatten(d, sep='.'): """Flatten a data structure into tuples""" def _flatten(e, parent_key='', sep='.'): import collections prefix = parent_key + sep if parent_key else '' if isinstance(e, collections.MutableMapping): return tuple((prefix + k2, v2) for k, v in e.items() for k2, v2 in _flatten(v, k, sep)) elif isinstance(e, collections.MutableSequence): return tuple((prefix + k2, v2) for i, v in enumerate(e) for k2, v2 in _flatten(v, str(i), sep)) else: return (parent_key, (e,)), return tuple((k, v[0]) for k, v in _flatten(d, '', sep))
[ "def", "flatten", "(", "d", ",", "sep", "=", "'.'", ")", ":", "def", "_flatten", "(", "e", ",", "parent_key", "=", "''", ",", "sep", "=", "'.'", ")", ":", "import", "collections", "prefix", "=", "parent_key", "+", "sep", "if", "parent_key", "else", ...
Flatten a data structure into tuples
[ "Flatten", "a", "data", "structure", "into", "tuples" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/util.py#L59-L74
train
50,532
Metatab/metapack
metapack/util.py
make_dir_structure
def make_dir_structure(base_dir): """Make the build directory structure. """ def maybe_makedir(*args): p = join(base_dir, *args) if exists(p) and not isdir(p): raise IOError("File '{}' exists but is not a directory ".format(p)) if not exists(p): makedirs(p) maybe_makedir(DOWNLOAD_DIR) maybe_makedir(PACKAGE_DIR) maybe_makedir(OLD_DIR)
python
def make_dir_structure(base_dir): """Make the build directory structure. """ def maybe_makedir(*args): p = join(base_dir, *args) if exists(p) and not isdir(p): raise IOError("File '{}' exists but is not a directory ".format(p)) if not exists(p): makedirs(p) maybe_makedir(DOWNLOAD_DIR) maybe_makedir(PACKAGE_DIR) maybe_makedir(OLD_DIR)
[ "def", "make_dir_structure", "(", "base_dir", ")", ":", "def", "maybe_makedir", "(", "*", "args", ")", ":", "p", "=", "join", "(", "base_dir", ",", "*", "args", ")", "if", "exists", "(", "p", ")", "and", "not", "isdir", "(", "p", ")", ":", "raise",...
Make the build directory structure.
[ "Make", "the", "build", "directory", "structure", "." ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/util.py#L89-L104
train
50,533
Metatab/metapack
metapack/util.py
guess_format
def guess_format(url): """Try to guess the format of a resource, possibly with a HEAD request""" import requests from requests.exceptions import InvalidSchema from rowgenerators import parse_url_to_dict parts = parse_url_to_dict(url) # Guess_type fails for root urls like 'http://civicknowledge.com' if parts.get('path'): type, encoding = mimetypes.guess_type(url) elif parts['scheme'] in ('http', 'https'): type, encoding = 'text/html', None # Assume it is a root url else: type, encoding = None, None if type is None: try: r = requests.head(url, allow_redirects=False) type = r.headers['Content-Type'] if ';' in type: type, encoding = [e.strip() for e in type.split(';')] except InvalidSchema: pass # It's probably FTP return type, mime_map.get(type)
python
def guess_format(url): """Try to guess the format of a resource, possibly with a HEAD request""" import requests from requests.exceptions import InvalidSchema from rowgenerators import parse_url_to_dict parts = parse_url_to_dict(url) # Guess_type fails for root urls like 'http://civicknowledge.com' if parts.get('path'): type, encoding = mimetypes.guess_type(url) elif parts['scheme'] in ('http', 'https'): type, encoding = 'text/html', None # Assume it is a root url else: type, encoding = None, None if type is None: try: r = requests.head(url, allow_redirects=False) type = r.headers['Content-Type'] if ';' in type: type, encoding = [e.strip() for e in type.split(';')] except InvalidSchema: pass # It's probably FTP return type, mime_map.get(type)
[ "def", "guess_format", "(", "url", ")", ":", "import", "requests", "from", "requests", ".", "exceptions", "import", "InvalidSchema", "from", "rowgenerators", "import", "parse_url_to_dict", "parts", "=", "parse_url_to_dict", "(", "url", ")", "# Guess_type fails for roo...
Try to guess the format of a resource, possibly with a HEAD request
[ "Try", "to", "guess", "the", "format", "of", "a", "resource", "possibly", "with", "a", "HEAD", "request" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/util.py#L202-L229
train
50,534
Metatab/metapack
metapack/util.py
get_materialized_data_cache
def get_materialized_data_cache(doc=None): """Return the cache directory where data can be written during a build, usually for a Jupyter notebook that generates many files for each execution""" from metapack.constants import MATERIALIZED_DATA_PREFIX from os.path import join if not doc: from metapack import Downloader downloader = Downloader() return downloader.cache.getsyspath(MATERIALIZED_DATA_PREFIX) else: dr = doc._cache.getsyspath(join(MATERIALIZED_DATA_PREFIX, doc.name)) ensure_dir(dr) return dr
python
def get_materialized_data_cache(doc=None): """Return the cache directory where data can be written during a build, usually for a Jupyter notebook that generates many files for each execution""" from metapack.constants import MATERIALIZED_DATA_PREFIX from os.path import join if not doc: from metapack import Downloader downloader = Downloader() return downloader.cache.getsyspath(MATERIALIZED_DATA_PREFIX) else: dr = doc._cache.getsyspath(join(MATERIALIZED_DATA_PREFIX, doc.name)) ensure_dir(dr) return dr
[ "def", "get_materialized_data_cache", "(", "doc", "=", "None", ")", ":", "from", "metapack", ".", "constants", "import", "MATERIALIZED_DATA_PREFIX", "from", "os", ".", "path", "import", "join", "if", "not", "doc", ":", "from", "metapack", "import", "Downloader",...
Return the cache directory where data can be written during a build, usually for a Jupyter notebook that generates many files for each execution
[ "Return", "the", "cache", "directory", "where", "data", "can", "be", "written", "during", "a", "build", "usually", "for", "a", "Jupyter", "notebook", "that", "generates", "many", "files", "for", "each", "execution" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/util.py#L339-L355
train
50,535
project-rig/rig
rig/place_and_route/routing_tree.py
RoutingTree.traverse
def traverse(self): """Traverse the tree yielding the direction taken to a node, the co-ordinates of that node and the directions leading from the Node. Yields ------ (direction, (x, y), {:py:class:`~rig.routing_table.Routes`, ...}) Direction taken to reach a Node in the tree, the (x, y) co-ordinate of that Node and routes leading to children of the Node. """ # A queue of (direction, node) to visit. The direction is the Links # entry which describes the direction in which we last moved to reach # the node (or None for the root). to_visit = deque([(None, self)]) while to_visit: direction, node = to_visit.popleft() # Determine the set of directions we must travel to reach the # children out_directions = set() for child_direction, child in node.children: # Note that if the direction is unspecified, we simply # (silently) don't add a route for that child. if child_direction is not None: out_directions.add(child_direction) # Search the next steps of the route too if isinstance(child, RoutingTree): assert child_direction is not None to_visit.append((child_direction, child)) # Yield the information pertaining to this Node yield direction, node.chip, out_directions
python
def traverse(self): """Traverse the tree yielding the direction taken to a node, the co-ordinates of that node and the directions leading from the Node. Yields ------ (direction, (x, y), {:py:class:`~rig.routing_table.Routes`, ...}) Direction taken to reach a Node in the tree, the (x, y) co-ordinate of that Node and routes leading to children of the Node. """ # A queue of (direction, node) to visit. The direction is the Links # entry which describes the direction in which we last moved to reach # the node (or None for the root). to_visit = deque([(None, self)]) while to_visit: direction, node = to_visit.popleft() # Determine the set of directions we must travel to reach the # children out_directions = set() for child_direction, child in node.children: # Note that if the direction is unspecified, we simply # (silently) don't add a route for that child. if child_direction is not None: out_directions.add(child_direction) # Search the next steps of the route too if isinstance(child, RoutingTree): assert child_direction is not None to_visit.append((child_direction, child)) # Yield the information pertaining to this Node yield direction, node.chip, out_directions
[ "def", "traverse", "(", "self", ")", ":", "# A queue of (direction, node) to visit. The direction is the Links", "# entry which describes the direction in which we last moved to reach", "# the node (or None for the root).", "to_visit", "=", "deque", "(", "[", "(", "None", ",", "sel...
Traverse the tree yielding the direction taken to a node, the co-ordinates of that node and the directions leading from the Node. Yields ------ (direction, (x, y), {:py:class:`~rig.routing_table.Routes`, ...}) Direction taken to reach a Node in the tree, the (x, y) co-ordinate of that Node and routes leading to children of the Node.
[ "Traverse", "the", "tree", "yielding", "the", "direction", "taken", "to", "a", "node", "the", "co", "-", "ordinates", "of", "that", "node", "and", "the", "directions", "leading", "from", "the", "Node", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/place_and_route/routing_tree.py#L96-L128
train
50,536
Metatab/metapack
metapack/cli/doc.py
wrap_url
def wrap_url(s, l): """Wrap a URL string""" parts = s.split('/') if len(parts) == 1: return parts[0] else: i = 0 lines = [] for j in range(i, len(parts) + 1): tv = '/'.join(parts[i:j]) nv = '/'.join(parts[i:j + 1]) if len(nv) > l or nv == tv: i = j lines.append(tv) return '/\n'.join(lines)
python
def wrap_url(s, l): """Wrap a URL string""" parts = s.split('/') if len(parts) == 1: return parts[0] else: i = 0 lines = [] for j in range(i, len(parts) + 1): tv = '/'.join(parts[i:j]) nv = '/'.join(parts[i:j + 1]) if len(nv) > l or nv == tv: i = j lines.append(tv) return '/\n'.join(lines)
[ "def", "wrap_url", "(", "s", ",", "l", ")", ":", "parts", "=", "s", ".", "split", "(", "'/'", ")", "if", "len", "(", "parts", ")", "==", "1", ":", "return", "parts", "[", "0", "]", "else", ":", "i", "=", "0", "lines", "=", "[", "]", "for", ...
Wrap a URL string
[ "Wrap", "a", "URL", "string" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/cli/doc.py#L291-L308
train
50,537
Parsely/probably
probably/hll.py
HyperLogLog.add
def add(self, uuid): """ Adds a key to the HyperLogLog """ if uuid: # Computing the hash try: x = hash64(uuid) except UnicodeEncodeError: x = hash64(uuid.encode('ascii', 'ignore')) # Finding the register to update by using the first b bits as an index j = x & ((1 << self.b) - 1) # Remove those b bits w = x >> self.b # Find the first 0 in the remaining bit pattern self.M[j] = max(self.M[j], self._get_rho(w, self.bitcount_arr))
python
def add(self, uuid): """ Adds a key to the HyperLogLog """ if uuid: # Computing the hash try: x = hash64(uuid) except UnicodeEncodeError: x = hash64(uuid.encode('ascii', 'ignore')) # Finding the register to update by using the first b bits as an index j = x & ((1 << self.b) - 1) # Remove those b bits w = x >> self.b # Find the first 0 in the remaining bit pattern self.M[j] = max(self.M[j], self._get_rho(w, self.bitcount_arr))
[ "def", "add", "(", "self", ",", "uuid", ")", ":", "if", "uuid", ":", "# Computing the hash", "try", ":", "x", "=", "hash64", "(", "uuid", ")", "except", "UnicodeEncodeError", ":", "x", "=", "hash64", "(", "uuid", ".", "encode", "(", "'ascii'", ",", "...
Adds a key to the HyperLogLog
[ "Adds", "a", "key", "to", "the", "HyperLogLog" ]
5d80855c1645fb2813678d5bcfe6108e33d80b9e
https://github.com/Parsely/probably/blob/5d80855c1645fb2813678d5bcfe6108e33d80b9e/probably/hll.py#L47-L60
train
50,538
Parsely/probably
probably/hll.py
HyperLogLog.estimate
def estimate(self): """ Returns the estimate of the cardinality """ E = self.alpha * float(self.m ** 2) / np.power(2.0, - self.M).sum() if E <= 2.5 * self.m: # Small range correction V = self.m - np.count_nonzero(self.M) return int(self.m * np.log(self.m / float(V))) if V > 0 else int(E) # intermidiate range correction -> No correction elif E <= float(long(1) << self.precision) / 30.0: return int(E) else: return int(-(long(1) << self.precision) * np.log(1.0 - E / (long(1) << self.precision)))
python
def estimate(self): """ Returns the estimate of the cardinality """ E = self.alpha * float(self.m ** 2) / np.power(2.0, - self.M).sum() if E <= 2.5 * self.m: # Small range correction V = self.m - np.count_nonzero(self.M) return int(self.m * np.log(self.m / float(V))) if V > 0 else int(E) # intermidiate range correction -> No correction elif E <= float(long(1) << self.precision) / 30.0: return int(E) else: return int(-(long(1) << self.precision) * np.log(1.0 - E / (long(1) << self.precision)))
[ "def", "estimate", "(", "self", ")", ":", "E", "=", "self", ".", "alpha", "*", "float", "(", "self", ".", "m", "**", "2", ")", "/", "np", ".", "power", "(", "2.0", ",", "-", "self", ".", "M", ")", ".", "sum", "(", ")", "if", "E", "<=", "2...
Returns the estimate of the cardinality
[ "Returns", "the", "estimate", "of", "the", "cardinality" ]
5d80855c1645fb2813678d5bcfe6108e33d80b9e
https://github.com/Parsely/probably/blob/5d80855c1645fb2813678d5bcfe6108e33d80b9e/probably/hll.py#L72-L83
train
50,539
Metatab/metapack
metapack/terms.py
Resource.base_url
def base_url(self): """Base URL for resolving resource URLs""" if self.doc.package_url: return self.doc.package_url return self.doc._ref
python
def base_url(self): """Base URL for resolving resource URLs""" if self.doc.package_url: return self.doc.package_url return self.doc._ref
[ "def", "base_url", "(", "self", ")", ":", "if", "self", ".", "doc", ".", "package_url", ":", "return", "self", ".", "doc", ".", "package_url", "return", "self", ".", "doc", ".", "_ref" ]
Base URL for resolving resource URLs
[ "Base", "URL", "for", "resolving", "resource", "URLs" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/terms.py#L29-L35
train
50,540
Metatab/metapack
metapack/terms.py
Resource.env
def env(self): """The execution context for rowprocessors and row-generating notebooks and functions. """ from copy import copy env = copy(self.doc.env) assert env is not None, 'Got a null execution context' env.update(self._envvar_env) env.update(self.all_props) return env
python
def env(self): """The execution context for rowprocessors and row-generating notebooks and functions. """ from copy import copy env = copy(self.doc.env) assert env is not None, 'Got a null execution context' env.update(self._envvar_env) env.update(self.all_props) return env
[ "def", "env", "(", "self", ")", ":", "from", "copy", "import", "copy", "env", "=", "copy", "(", "self", ".", "doc", ".", "env", ")", "assert", "env", "is", "not", "None", ",", "'Got a null execution context'", "env", ".", "update", "(", "self", ".", ...
The execution context for rowprocessors and row-generating notebooks and functions.
[ "The", "execution", "context", "for", "rowprocessors", "and", "row", "-", "generating", "notebooks", "and", "functions", "." ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/terms.py#L54-L66
train
50,541
Metatab/metapack
metapack/terms.py
Resource._resolved_url
def _resolved_url(self): """Return a URL that properly combines the base_url and a possibly relative resource url""" if not self.url: return None u = parse_app_url(self.url) if u.scheme == 'index': u = u.resolve() if u.scheme != 'file': # Hopefully means the URL is http, https, ftp, etc. return u elif u.resource_format == 'ipynb': # This shouldn't be a special case, but ... t = self.doc.package_url.inner.join_dir(self.url) t = t.as_type(type(u)) t.fragment = u.fragment return t elif u.proto == 'metatab': u = self.expanded_url return u.get_resource().get_target() elif u.proto == 'metapack': u = self.expanded_url if u.resource: return u.resource.resolved_url.get_resource().get_target() else: return u if u.scheme == 'file': return self.expanded_url elif False: assert isinstance(self.doc.package_url, MetapackPackageUrl), (type(self.doc.package_url), self.doc.package_url) try: t = self.doc.package_url.resolve_url(self.url) # Why are we doing this? # Also a hack t.scheme_extension = parse_app_url(self.url).scheme_extension # Another Hack! try: if not any(t.fragment) and any(u.fragment): t.fragment = u.fragment except TypeError: if not t.fragment and u.fragment: t.fragment = u.fragment # Yet more hack! t = parse_app_url(str(t)) return t except ResourceError as e: # This case happens when a filesystem packages has a non-standard metadata name # Total hack raise else: raise ResourceError('Unknown case for url {} '.format(self.url))
python
def _resolved_url(self): """Return a URL that properly combines the base_url and a possibly relative resource url""" if not self.url: return None u = parse_app_url(self.url) if u.scheme == 'index': u = u.resolve() if u.scheme != 'file': # Hopefully means the URL is http, https, ftp, etc. return u elif u.resource_format == 'ipynb': # This shouldn't be a special case, but ... t = self.doc.package_url.inner.join_dir(self.url) t = t.as_type(type(u)) t.fragment = u.fragment return t elif u.proto == 'metatab': u = self.expanded_url return u.get_resource().get_target() elif u.proto == 'metapack': u = self.expanded_url if u.resource: return u.resource.resolved_url.get_resource().get_target() else: return u if u.scheme == 'file': return self.expanded_url elif False: assert isinstance(self.doc.package_url, MetapackPackageUrl), (type(self.doc.package_url), self.doc.package_url) try: t = self.doc.package_url.resolve_url(self.url) # Why are we doing this? # Also a hack t.scheme_extension = parse_app_url(self.url).scheme_extension # Another Hack! try: if not any(t.fragment) and any(u.fragment): t.fragment = u.fragment except TypeError: if not t.fragment and u.fragment: t.fragment = u.fragment # Yet more hack! t = parse_app_url(str(t)) return t except ResourceError as e: # This case happens when a filesystem packages has a non-standard metadata name # Total hack raise else: raise ResourceError('Unknown case for url {} '.format(self.url))
[ "def", "_resolved_url", "(", "self", ")", ":", "if", "not", "self", ".", "url", ":", "return", "None", "u", "=", "parse_app_url", "(", "self", ".", "url", ")", "if", "u", ".", "scheme", "==", "'index'", ":", "u", "=", "u", ".", "resolve", "(", ")...
Return a URL that properly combines the base_url and a possibly relative resource url
[ "Return", "a", "URL", "that", "properly", "combines", "the", "base_url", "and", "a", "possibly", "relative", "resource", "url" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/terms.py#L115-L190
train
50,542
Metatab/metapack
metapack/terms.py
Resource.schema_term
def schema_term(self): """Return the Table term for this resource, which is referenced either by the `table` property or the `schema` property""" if not self.name: raise MetapackError("Resource for url '{}' doe not have name".format(self.url)) t = self.doc.find_first('Root.Table', value=self.get_value('name')) frm = 'name' if not t: t = self.doc.find_first('Root.Table', value=self.get_value('schema')) frm = 'schema' if not t: frm = None return t
python
def schema_term(self): """Return the Table term for this resource, which is referenced either by the `table` property or the `schema` property""" if not self.name: raise MetapackError("Resource for url '{}' doe not have name".format(self.url)) t = self.doc.find_first('Root.Table', value=self.get_value('name')) frm = 'name' if not t: t = self.doc.find_first('Root.Table', value=self.get_value('schema')) frm = 'schema' if not t: frm = None return t
[ "def", "schema_term", "(", "self", ")", ":", "if", "not", "self", ".", "name", ":", "raise", "MetapackError", "(", "\"Resource for url '{}' doe not have name\"", ".", "format", "(", "self", ".", "url", ")", ")", "t", "=", "self", ".", "doc", ".", "find_fir...
Return the Table term for this resource, which is referenced either by the `table` property or the `schema` property
[ "Return", "the", "Table", "term", "for", "this", "resource", "which", "is", "referenced", "either", "by", "the", "table", "property", "or", "the", "schema", "property" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/terms.py#L222-L239
train
50,543
Metatab/metapack
metapack/terms.py
Resource.headers
def headers(self): """Return the headers for the resource. Returns the AltName, if specified; if not, then the Name, and if that is empty, a name based on the column position. These headers are specifically applicable to the output table, and may not apply to the resource source. FOr those headers, use source_headers""" t = self.schema_term if t: return [self._name_for_col_term(c, i) for i, c in enumerate(t.children, 1) if c.term_is("Table.Column")] else: return None
python
def headers(self): """Return the headers for the resource. Returns the AltName, if specified; if not, then the Name, and if that is empty, a name based on the column position. These headers are specifically applicable to the output table, and may not apply to the resource source. FOr those headers, use source_headers""" t = self.schema_term if t: return [self._name_for_col_term(c, i) for i, c in enumerate(t.children, 1) if c.term_is("Table.Column")] else: return None
[ "def", "headers", "(", "self", ")", ":", "t", "=", "self", ".", "schema_term", "if", "t", ":", "return", "[", "self", ".", "_name_for_col_term", "(", "c", ",", "i", ")", "for", "i", ",", "c", "in", "enumerate", "(", "t", ".", "children", ",", "1"...
Return the headers for the resource. Returns the AltName, if specified; if not, then the Name, and if that is empty, a name based on the column position. These headers are specifically applicable to the output table, and may not apply to the resource source. FOr those headers, use source_headers
[ "Return", "the", "headers", "for", "the", "resource", ".", "Returns", "the", "AltName", "if", "specified", ";", "if", "not", "then", "the", "Name", "and", "if", "that", "is", "empty", "a", "name", "based", "on", "the", "column", "position", ".", "These",...
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/terms.py#L242-L254
train
50,544
Metatab/metapack
metapack/terms.py
Resource.source_headers
def source_headers(self): """"Returns the headers for the resource source. Specifically, does not include any header that is the EMPTY_SOURCE_HEADER value of _NONE_""" t = self.schema_term if t: return [self._name_for_col_term(c, i) for i, c in enumerate(t.children, 1) if c.term_is("Table.Column") and c.get_value('name') != EMPTY_SOURCE_HEADER ] else: return None
python
def source_headers(self): """"Returns the headers for the resource source. Specifically, does not include any header that is the EMPTY_SOURCE_HEADER value of _NONE_""" t = self.schema_term if t: return [self._name_for_col_term(c, i) for i, c in enumerate(t.children, 1) if c.term_is("Table.Column") and c.get_value('name') != EMPTY_SOURCE_HEADER ] else: return None
[ "def", "source_headers", "(", "self", ")", ":", "t", "=", "self", ".", "schema_term", "if", "t", ":", "return", "[", "self", ".", "_name_for_col_term", "(", "c", ",", "i", ")", "for", "i", ",", "c", "in", "enumerate", "(", "t", ".", "children", ","...
Returns the headers for the resource source. Specifically, does not include any header that is the EMPTY_SOURCE_HEADER value of _NONE_
[ "Returns", "the", "headers", "for", "the", "resource", "source", ".", "Specifically", "does", "not", "include", "any", "header", "that", "is", "the", "EMPTY_SOURCE_HEADER", "value", "of", "_NONE_" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/terms.py#L257-L269
train
50,545
Metatab/metapack
metapack/terms.py
Resource.columns
def columns(self): """Return column information from the schema or from an upstreram package""" try: # For resources that are metapack packages. r = self.expanded_url.resource.columns() return list(r) except AttributeError as e: pass return self.schema_columns
python
def columns(self): """Return column information from the schema or from an upstreram package""" try: # For resources that are metapack packages. r = self.expanded_url.resource.columns() return list(r) except AttributeError as e: pass return self.schema_columns
[ "def", "columns", "(", "self", ")", ":", "try", ":", "# For resources that are metapack packages.", "r", "=", "self", ".", "expanded_url", ".", "resource", ".", "columns", "(", ")", "return", "list", "(", "r", ")", "except", "AttributeError", "as", "e", ":",...
Return column information from the schema or from an upstreram package
[ "Return", "column", "information", "from", "the", "schema", "or", "from", "an", "upstreram", "package" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/terms.py#L273-L284
train
50,546
Metatab/metapack
metapack/terms.py
Resource.schema_columns
def schema_columns(self): """Return column informatino only from this schema""" t = self.schema_term columns = [] if t: for i, c in enumerate(t.children): if c.term_is("Table.Column"): p = c.all_props p['pos'] = i p['name'] = c.value p['header'] = self._name_for_col_term(c, i) columns.append(p) return columns
python
def schema_columns(self): """Return column informatino only from this schema""" t = self.schema_term columns = [] if t: for i, c in enumerate(t.children): if c.term_is("Table.Column"): p = c.all_props p['pos'] = i p['name'] = c.value p['header'] = self._name_for_col_term(c, i) columns.append(p) return columns
[ "def", "schema_columns", "(", "self", ")", ":", "t", "=", "self", ".", "schema_term", "columns", "=", "[", "]", "if", "t", ":", "for", "i", ",", "c", "in", "enumerate", "(", "t", ".", "children", ")", ":", "if", "c", ".", "term_is", "(", "\"Table...
Return column informatino only from this schema
[ "Return", "column", "informatino", "only", "from", "this", "schema" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/terms.py#L287-L306
train
50,547
Metatab/metapack
metapack/terms.py
Resource.row_processor_table
def row_processor_table(self, ignore_none=False): """Create a row processor from the schema, to convert the text values from the CSV into real types""" from rowgenerators.rowpipe import Table type_map = { None: None, 'string': 'str', 'text': 'str', 'number': 'float', 'integer': 'int' } def map_type(v): return type_map.get(v, v) if self.schema_term: t = Table(self.get_value('name')) col_n = 0 for c in self.schema_term.children: if ignore_none and c.name == EMPTY_SOURCE_HEADER: continue if c.term_is('Table.Column'): t.add_column(self._name_for_col_term(c, col_n), datatype=map_type(c.get_value('datatype')), valuetype=map_type(c.get_value('valuetype')), transform=c.get_value('transform'), width=c.get_value('width') ) col_n += 1 return t else: return None
python
def row_processor_table(self, ignore_none=False): """Create a row processor from the schema, to convert the text values from the CSV into real types""" from rowgenerators.rowpipe import Table type_map = { None: None, 'string': 'str', 'text': 'str', 'number': 'float', 'integer': 'int' } def map_type(v): return type_map.get(v, v) if self.schema_term: t = Table(self.get_value('name')) col_n = 0 for c in self.schema_term.children: if ignore_none and c.name == EMPTY_SOURCE_HEADER: continue if c.term_is('Table.Column'): t.add_column(self._name_for_col_term(c, col_n), datatype=map_type(c.get_value('datatype')), valuetype=map_type(c.get_value('valuetype')), transform=c.get_value('transform'), width=c.get_value('width') ) col_n += 1 return t else: return None
[ "def", "row_processor_table", "(", "self", ",", "ignore_none", "=", "False", ")", ":", "from", "rowgenerators", ".", "rowpipe", "import", "Table", "type_map", "=", "{", "None", ":", "None", ",", "'string'", ":", "'str'", ",", "'text'", ":", "'str'", ",", ...
Create a row processor from the schema, to convert the text values from the CSV into real types
[ "Create", "a", "row", "processor", "from", "the", "schema", "to", "convert", "the", "text", "values", "from", "the", "CSV", "into", "real", "types" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/terms.py#L308-L347
train
50,548
Metatab/metapack
metapack/terms.py
Resource.raw_row_generator
def raw_row_generator(self): """Like rowgenerator, but does not try to create a row processor table""" from rowgenerators import get_generator self.doc.set_sys_path() # Set sys path to package 'lib' dir in case of python function generator ru = self.resolved_url try: resource = ru.resource # For Metapack urls return resource.row_generator except AttributeError: pass ut = ru.get_resource().get_target() # Encoding is supposed to be preserved in the URL but isn't source_url = parse_app_url(self.url) # source_url will be None for Sql terms. ut.encoding = self.get_value('encoding') or (source_url.encoding if source_url else None) g = get_generator(ut, resource=self, doc=self._doc, working_dir=self._doc.doc_dir, env=self.env) assert g, ut return g
python
def raw_row_generator(self): """Like rowgenerator, but does not try to create a row processor table""" from rowgenerators import get_generator self.doc.set_sys_path() # Set sys path to package 'lib' dir in case of python function generator ru = self.resolved_url try: resource = ru.resource # For Metapack urls return resource.row_generator except AttributeError: pass ut = ru.get_resource().get_target() # Encoding is supposed to be preserved in the URL but isn't source_url = parse_app_url(self.url) # source_url will be None for Sql terms. ut.encoding = self.get_value('encoding') or (source_url.encoding if source_url else None) g = get_generator(ut, resource=self, doc=self._doc, working_dir=self._doc.doc_dir, env=self.env) assert g, ut return g
[ "def", "raw_row_generator", "(", "self", ")", ":", "from", "rowgenerators", "import", "get_generator", "self", ".", "doc", ".", "set_sys_path", "(", ")", "# Set sys path to package 'lib' dir in case of python function generator", "ru", "=", "self", ".", "resolved_url", ...
Like rowgenerator, but does not try to create a row processor table
[ "Like", "rowgenerator", "but", "does", "not", "try", "to", "create", "a", "row", "processor", "table" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/terms.py#L350-L380
train
50,549
Metatab/metapack
metapack/terms.py
Resource._get_header
def _get_header(self): """Get the header from the deinfed header rows, for use on references or resources where the schema has not been run""" try: header_lines = [int(e) for e in str(self.get_value('headerlines', 0)).split(',')] except ValueError as e: header_lines = [0] # We're processing the raw datafile, with no schema. header_rows = islice(self.row_generator, min(header_lines), max(header_lines) + 1) from tableintuit import RowIntuiter headers = RowIntuiter.coalesce_headers(header_rows) return headers
python
def _get_header(self): """Get the header from the deinfed header rows, for use on references or resources where the schema has not been run""" try: header_lines = [int(e) for e in str(self.get_value('headerlines', 0)).split(',')] except ValueError as e: header_lines = [0] # We're processing the raw datafile, with no schema. header_rows = islice(self.row_generator, min(header_lines), max(header_lines) + 1) from tableintuit import RowIntuiter headers = RowIntuiter.coalesce_headers(header_rows) return headers
[ "def", "_get_header", "(", "self", ")", ":", "try", ":", "header_lines", "=", "[", "int", "(", "e", ")", "for", "e", "in", "str", "(", "self", ".", "get_value", "(", "'headerlines'", ",", "0", ")", ")", ".", "split", "(", "','", ")", "]", "except...
Get the header from the deinfed header rows, for use on references or resources where the schema has not been run
[ "Get", "the", "header", "from", "the", "deinfed", "header", "rows", "for", "use", "on", "references", "or", "resources", "where", "the", "schema", "has", "not", "been", "run" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/terms.py#L426-L441
train
50,550
Metatab/metapack
metapack/terms.py
Resource.iterdict
def iterdict(self): """Iterate over the resource in dict records""" from collections import OrderedDict headers = None for row in self: if headers is None: headers = row continue yield OrderedDict(zip(headers, row))
python
def iterdict(self): """Iterate over the resource in dict records""" from collections import OrderedDict headers = None for row in self: if headers is None: headers = row continue yield OrderedDict(zip(headers, row))
[ "def", "iterdict", "(", "self", ")", ":", "from", "collections", "import", "OrderedDict", "headers", "=", "None", "for", "row", "in", "self", ":", "if", "headers", "is", "None", ":", "headers", "=", "row", "continue", "yield", "OrderedDict", "(", "zip", ...
Iterate over the resource in dict records
[ "Iterate", "over", "the", "resource", "in", "dict", "records" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/terms.py#L493-L505
train
50,551
Metatab/metapack
metapack/terms.py
Resource.iterrows
def iterrows(self): """Iterate over the resource as row proxy objects, which allow acessing colums as attributes""" row_proxy = None headers = None for row in self: if not headers: headers = row row_proxy = RowProxy(headers) continue yield row_proxy.set_row(row)
python
def iterrows(self): """Iterate over the resource as row proxy objects, which allow acessing colums as attributes""" row_proxy = None headers = None for row in self: if not headers: headers = row row_proxy = RowProxy(headers) continue yield row_proxy.set_row(row)
[ "def", "iterrows", "(", "self", ")", ":", "row_proxy", "=", "None", "headers", "=", "None", "for", "row", "in", "self", ":", "if", "not", "headers", ":", "headers", "=", "row", "row_proxy", "=", "RowProxy", "(", "headers", ")", "continue", "yield", "ro...
Iterate over the resource as row proxy objects, which allow acessing colums as attributes
[ "Iterate", "over", "the", "resource", "as", "row", "proxy", "objects", "which", "allow", "acessing", "colums", "as", "attributes" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/terms.py#L508-L524
train
50,552
Metatab/metapack
metapack/terms.py
Resource.iterrowproxy
def iterrowproxy(self, cls=RowProxy): """Iterate over the resource as row proxy objects, which allow acessing colums as attributes. Like iterrows, but allows for setting a specific RowProxy class. """ row_proxy = None headers = None for row in self: if not headers: headers = row row_proxy = cls(headers) continue yield row_proxy.set_row(row)
python
def iterrowproxy(self, cls=RowProxy): """Iterate over the resource as row proxy objects, which allow acessing colums as attributes. Like iterrows, but allows for setting a specific RowProxy class. """ row_proxy = None headers = None for row in self: if not headers: headers = row row_proxy = cls(headers) continue yield row_proxy.set_row(row)
[ "def", "iterrowproxy", "(", "self", ",", "cls", "=", "RowProxy", ")", ":", "row_proxy", "=", "None", "headers", "=", "None", "for", "row", "in", "self", ":", "if", "not", "headers", ":", "headers", "=", "row", "row_proxy", "=", "cls", "(", "headers", ...
Iterate over the resource as row proxy objects, which allow acessing colums as attributes. Like iterrows, but allows for setting a specific RowProxy class.
[ "Iterate", "over", "the", "resource", "as", "row", "proxy", "objects", "which", "allow", "acessing", "colums", "as", "attributes", ".", "Like", "iterrows", "but", "allows", "for", "setting", "a", "specific", "RowProxy", "class", "." ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/terms.py#L526-L541
train
50,553
Metatab/metapack
metapack/terms.py
Resource.iterstruct
def iterstruct(self): """Yield data structures built from the JSON header specifications in a table""" from rowgenerators.rowpipe.json import add_to_struct json_headers = self.json_headers for row in islice(self, 1, None): # islice skips header d = {} for pos, jh in json_headers: add_to_struct(d, jh, row[pos]) yield d
python
def iterstruct(self): """Yield data structures built from the JSON header specifications in a table""" from rowgenerators.rowpipe.json import add_to_struct json_headers = self.json_headers for row in islice(self, 1, None): # islice skips header d = {} for pos, jh in json_headers: add_to_struct(d, jh, row[pos]) yield d
[ "def", "iterstruct", "(", "self", ")", ":", "from", "rowgenerators", ".", "rowpipe", ".", "json", "import", "add_to_struct", "json_headers", "=", "self", ".", "json_headers", "for", "row", "in", "islice", "(", "self", ",", "1", ",", "None", ")", ":", "# ...
Yield data structures built from the JSON header specifications in a table
[ "Yield", "data", "structures", "built", "from", "the", "JSON", "header", "specifications", "in", "a", "table" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/terms.py#L548-L558
train
50,554
Metatab/metapack
metapack/terms.py
Resource.iterjson
def iterjson(self, *args, **kwargs): """Yields the data structures from iterstruct as JSON strings""" from rowgenerators.rowpipe.json import VTEncoder import json if 'cls' not in kwargs: kwargs['cls'] = VTEncoder for s in self.iterstruct: yield (json.dumps(s, *args, **kwargs))
python
def iterjson(self, *args, **kwargs): """Yields the data structures from iterstruct as JSON strings""" from rowgenerators.rowpipe.json import VTEncoder import json if 'cls' not in kwargs: kwargs['cls'] = VTEncoder for s in self.iterstruct: yield (json.dumps(s, *args, **kwargs))
[ "def", "iterjson", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "from", "rowgenerators", ".", "rowpipe", ".", "json", "import", "VTEncoder", "import", "json", "if", "'cls'", "not", "in", "kwargs", ":", "kwargs", "[", "'cls'", "]", ...
Yields the data structures from iterstruct as JSON strings
[ "Yields", "the", "data", "structures", "from", "iterstruct", "as", "JSON", "strings" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/terms.py#L560-L569
train
50,555
Metatab/metapack
metapack/terms.py
Resource.iteryaml
def iteryaml(self, *args, **kwargs): """Yields the data structures from iterstruct as YAML strings""" from rowgenerators.rowpipe.json import VTEncoder import yaml if 'cls' not in kwargs: kwargs['cls'] = VTEncoder for s in self.iterstruct: yield (yaml.safe_dump(s))
python
def iteryaml(self, *args, **kwargs): """Yields the data structures from iterstruct as YAML strings""" from rowgenerators.rowpipe.json import VTEncoder import yaml if 'cls' not in kwargs: kwargs['cls'] = VTEncoder for s in self.iterstruct: yield (yaml.safe_dump(s))
[ "def", "iteryaml", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "from", "rowgenerators", ".", "rowpipe", ".", "json", "import", "VTEncoder", "import", "yaml", "if", "'cls'", "not", "in", "kwargs", ":", "kwargs", "[", "'cls'", "]", ...
Yields the data structures from iterstruct as YAML strings
[ "Yields", "the", "data", "structures", "from", "iterstruct", "as", "YAML", "strings" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/terms.py#L571-L580
train
50,556
Metatab/metapack
metapack/terms.py
Resource.dataframe
def dataframe(self, dtype=False, parse_dates=True, *args, **kwargs): """Return a pandas datafrome from the resource""" import pandas as pd rg = self.row_generator t = self.resolved_url.get_resource().get_target() if t.target_format == 'csv': return self.read_csv(dtype, parse_dates, *args, **kwargs) # Maybe generator has it's own Dataframe method() try: return rg.dataframe( *args, **kwargs) except AttributeError: pass # Just normal data, so use the iterator in this object. headers = next(islice(self, 0, 1)) data = islice(self, 1, None) df = pd.DataFrame(list(data), columns=headers, *args, **kwargs) self.errors = df.metatab_errors = rg.errors if hasattr(rg, 'errors') and rg.errors else {} return df
python
def dataframe(self, dtype=False, parse_dates=True, *args, **kwargs): """Return a pandas datafrome from the resource""" import pandas as pd rg = self.row_generator t = self.resolved_url.get_resource().get_target() if t.target_format == 'csv': return self.read_csv(dtype, parse_dates, *args, **kwargs) # Maybe generator has it's own Dataframe method() try: return rg.dataframe( *args, **kwargs) except AttributeError: pass # Just normal data, so use the iterator in this object. headers = next(islice(self, 0, 1)) data = islice(self, 1, None) df = pd.DataFrame(list(data), columns=headers, *args, **kwargs) self.errors = df.metatab_errors = rg.errors if hasattr(rg, 'errors') and rg.errors else {} return df
[ "def", "dataframe", "(", "self", ",", "dtype", "=", "False", ",", "parse_dates", "=", "True", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "import", "pandas", "as", "pd", "rg", "=", "self", ".", "row_generator", "t", "=", "self", ".", "reso...
Return a pandas datafrome from the resource
[ "Return", "a", "pandas", "datafrome", "from", "the", "resource" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/terms.py#L585-L612
train
50,557
Metatab/metapack
metapack/terms.py
Resource.geoframe
def geoframe(self, *args, **kwargs): """Return a Geo dataframe""" from geopandas import GeoDataFrame import geopandas as gpd from shapely.geometry.polygon import BaseGeometry from shapely.wkt import loads gdf = None try: gdf = self.resolved_url.geoframe(*args, **kwargs) except AttributeError: pass if gdf is None: try: gdf = self.resolved_url.geo_generator.geoframe(*args, **kwargs) except AttributeError: pass if gdf is None: try: gdf = self.row_generator.geoframe(*args, **kwargs) except AttributeError: pass if gdf is None: try: gdf = GeoDataFrame(self.dataframe(*args, **kwargs)) first = next(gdf.iterrows())[1]['geometry'] if isinstance(first, str): # We have a GeoDataframe, but the geometry column is still strings, so # it must be converted shapes = [loads(row['geometry']) for i, row in gdf.iterrows()] elif not isinstance(first, BaseGeometry): # If we are reading a metatab package, the geometry column's type should be # 'geometry' which will give the geometry values class type of # rowpipe.valuetype.geo.ShapeValue. However, there are other # types of objects that have a 'shape' property. shapes = [row['geometry'].shape for i, row in gdf.iterrows()] else: shapes = gdf['geometry'] gdf['geometry'] = gpd.GeoSeries(shapes) gdf.set_geometry('geometry') # Wild guess. This case should be most often for Metatab processed geo files, # which are all 4326 if gdf.crs is None: gdf.crs = {'init': 'epsg:4326'} except KeyError as e: raise ResourceError("Failed to create GeoDataFrame for resource '{}': No geometry column".format(self.name)) except (KeyError,TypeError) as e: raise ResourceError("Failed to create GeoDataFrame for resource '{}': {}".format(self.name, str(e))) assert gdf.crs is not None return gdf
python
def geoframe(self, *args, **kwargs): """Return a Geo dataframe""" from geopandas import GeoDataFrame import geopandas as gpd from shapely.geometry.polygon import BaseGeometry from shapely.wkt import loads gdf = None try: gdf = self.resolved_url.geoframe(*args, **kwargs) except AttributeError: pass if gdf is None: try: gdf = self.resolved_url.geo_generator.geoframe(*args, **kwargs) except AttributeError: pass if gdf is None: try: gdf = self.row_generator.geoframe(*args, **kwargs) except AttributeError: pass if gdf is None: try: gdf = GeoDataFrame(self.dataframe(*args, **kwargs)) first = next(gdf.iterrows())[1]['geometry'] if isinstance(first, str): # We have a GeoDataframe, but the geometry column is still strings, so # it must be converted shapes = [loads(row['geometry']) for i, row in gdf.iterrows()] elif not isinstance(first, BaseGeometry): # If we are reading a metatab package, the geometry column's type should be # 'geometry' which will give the geometry values class type of # rowpipe.valuetype.geo.ShapeValue. However, there are other # types of objects that have a 'shape' property. shapes = [row['geometry'].shape for i, row in gdf.iterrows()] else: shapes = gdf['geometry'] gdf['geometry'] = gpd.GeoSeries(shapes) gdf.set_geometry('geometry') # Wild guess. This case should be most often for Metatab processed geo files, # which are all 4326 if gdf.crs is None: gdf.crs = {'init': 'epsg:4326'} except KeyError as e: raise ResourceError("Failed to create GeoDataFrame for resource '{}': No geometry column".format(self.name)) except (KeyError,TypeError) as e: raise ResourceError("Failed to create GeoDataFrame for resource '{}': {}".format(self.name, str(e))) assert gdf.crs is not None return gdf
[ "def", "geoframe", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "from", "geopandas", "import", "GeoDataFrame", "import", "geopandas", "as", "gpd", "from", "shapely", ".", "geometry", ".", "polygon", "import", "BaseGeometry", "from", "s...
Return a Geo dataframe
[ "Return", "a", "Geo", "dataframe" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/terms.py#L614-L678
train
50,558
Metatab/metapack
metapack/terms.py
Resource.read_csv
def read_csv(self, dtype=False, parse_dates=True, *args, **kwargs): """Fetch the target and pass through to pandas.read_csv Don't provide the first argument of read_csv(); it is supplied internally. """ import pandas t = self.resolved_url.get_resource().get_target() kwargs = self._update_pandas_kwargs(dtype, parse_dates, kwargs) return pandas.read_csv(t.fspath, *args, **kwargs)
python
def read_csv(self, dtype=False, parse_dates=True, *args, **kwargs): """Fetch the target and pass through to pandas.read_csv Don't provide the first argument of read_csv(); it is supplied internally. """ import pandas t = self.resolved_url.get_resource().get_target() kwargs = self._update_pandas_kwargs(dtype, parse_dates, kwargs) return pandas.read_csv(t.fspath, *args, **kwargs)
[ "def", "read_csv", "(", "self", ",", "dtype", "=", "False", ",", "parse_dates", "=", "True", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "import", "pandas", "t", "=", "self", ".", "resolved_url", ".", "get_resource", "(", ")", ".", "get_targ...
Fetch the target and pass through to pandas.read_csv Don't provide the first argument of read_csv(); it is supplied internally.
[ "Fetch", "the", "target", "and", "pass", "through", "to", "pandas", ".", "read_csv" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/terms.py#L718-L730
train
50,559
Metatab/metapack
metapack/terms.py
Resource.read_fwf
def read_fwf(self, *args, **kwargs): """Fetch the target and pass through to pandas.read_fwf. Don't provide the first argument of read_fwf(); it is supplied internally. """ import pandas t = self.resolved_url.get_resource().get_target() return pandas.read_fwf(t.fspath, *args, **kwargs)
python
def read_fwf(self, *args, **kwargs): """Fetch the target and pass through to pandas.read_fwf. Don't provide the first argument of read_fwf(); it is supplied internally. """ import pandas t = self.resolved_url.get_resource().get_target() return pandas.read_fwf(t.fspath, *args, **kwargs)
[ "def", "read_fwf", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "import", "pandas", "t", "=", "self", ".", "resolved_url", ".", "get_resource", "(", ")", ".", "get_target", "(", ")", "return", "pandas", ".", "read_fwf", "(", "t",...
Fetch the target and pass through to pandas.read_fwf. Don't provide the first argument of read_fwf(); it is supplied internally.
[ "Fetch", "the", "target", "and", "pass", "through", "to", "pandas", ".", "read_fwf", "." ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/terms.py#L732-L740
train
50,560
Metatab/metapack
metapack/terms.py
Resource.petl
def petl(self, *args, **kwargs): """Return a PETL source object""" import petl t = self.resolved_url.get_resource().get_target() if t.target_format == 'txt': return petl.fromtext(str(t.fspath), *args, **kwargs) elif t.target_format == 'csv': return petl.fromcsv(str(t.fspath), *args, **kwargs) else: raise Exception("Can't handle")
python
def petl(self, *args, **kwargs): """Return a PETL source object""" import petl t = self.resolved_url.get_resource().get_target() if t.target_format == 'txt': return petl.fromtext(str(t.fspath), *args, **kwargs) elif t.target_format == 'csv': return petl.fromcsv(str(t.fspath), *args, **kwargs) else: raise Exception("Can't handle")
[ "def", "petl", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "import", "petl", "t", "=", "self", ".", "resolved_url", ".", "get_resource", "(", ")", ".", "get_target", "(", ")", "if", "t", ".", "target_format", "==", "'txt'", ":...
Return a PETL source object
[ "Return", "a", "PETL", "source", "object" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/terms.py#L749-L760
train
50,561
Metatab/metapack
metapack/terms.py
SqlQuery.context
def context(self): """Build the interpolation context from the schemas""" # Can't use self.columns b/c of recursion with resolved_url t = self.schema_term if not t: return {} sql_columns = [] all_columns = [] for i, c in enumerate(t.children): if c.term_is("Table.Column"): p = c.all_props if p.get('sqlselect'): # has a value for SqlSqlect sql_columns.append(p.get('sqlselect')) all_columns.append(c.name) return { 'SQL_COLUMNS': ', '.join(sql_columns), 'ALL_COLUMNS': ', '.join(all_columns) }
python
def context(self): """Build the interpolation context from the schemas""" # Can't use self.columns b/c of recursion with resolved_url t = self.schema_term if not t: return {} sql_columns = [] all_columns = [] for i, c in enumerate(t.children): if c.term_is("Table.Column"): p = c.all_props if p.get('sqlselect'): # has a value for SqlSqlect sql_columns.append(p.get('sqlselect')) all_columns.append(c.name) return { 'SQL_COLUMNS': ', '.join(sql_columns), 'ALL_COLUMNS': ', '.join(all_columns) }
[ "def", "context", "(", "self", ")", ":", "# Can't use self.columns b/c of recursion with resolved_url", "t", "=", "self", ".", "schema_term", "if", "not", "t", ":", "return", "{", "}", "sql_columns", "=", "[", "]", "all_columns", "=", "[", "]", "for", "i", "...
Build the interpolation context from the schemas
[ "Build", "the", "interpolation", "context", "from", "the", "schemas" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/terms.py#L839-L864
train
50,562
Metatab/metapack
metapack/jupyter/ipython.py
caller_locals
def caller_locals(): """Get the local variables in the caller's frame.""" import inspect frame = inspect.currentframe() try: return frame.f_back.f_back.f_locals finally: del frame
python
def caller_locals(): """Get the local variables in the caller's frame.""" import inspect frame = inspect.currentframe() try: return frame.f_back.f_back.f_locals finally: del frame
[ "def", "caller_locals", "(", ")", ":", "import", "inspect", "frame", "=", "inspect", ".", "currentframe", "(", ")", "try", ":", "return", "frame", ".", "f_back", ".", "f_back", ".", "f_locals", "finally", ":", "del", "frame" ]
Get the local variables in the caller's frame.
[ "Get", "the", "local", "variables", "in", "the", "caller", "s", "frame", "." ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/jupyter/ipython.py#L15-L22
train
50,563
Metatab/metapack
metapack/jupyter/ipython.py
open_package
def open_package(locals=None, dr=None): """Try to open a package with the metatab_doc variable, which is set when a Notebook is run as a resource. If that does not exist, try the local _packages directory""" if locals is None: locals = caller_locals() try: # Running in a package build return op(locals['metatab_doc']) except KeyError: # Running interactively in Jupyter package_name = None build_package_dir = None source_package = None if dr is None: dr = getcwd() for i, e in enumerate(walk_up(dr)): intr = set([DEFAULT_METATAB_FILE, LINES_METATAB_FILE, IPYNB_METATAB_FILE]) & set(e[2]) if intr: source_package = join(e[0], list(intr)[0]) p = op(source_package) package_name = p.find_first_value("Root.Name") if not package_name: raise PackageError("Source package in {} does not have root.Name term".format(e[0])) if PACKAGE_PREFIX in e[1]: build_package_dir = join(e[0], PACKAGE_PREFIX) break if i > 2: break if build_package_dir and package_name and exists(join(build_package_dir, package_name)): # Open the previously built package built_package = join(build_package_dir, package_name) try: return op(built_package) except RowGeneratorError as e: pass # Probably could not open the metadata file. if source_package: # Open the source package return op(source_package) raise PackageError("Failed to find package, either in locals() or above dir '{}' ".format(dr))
python
def open_package(locals=None, dr=None): """Try to open a package with the metatab_doc variable, which is set when a Notebook is run as a resource. If that does not exist, try the local _packages directory""" if locals is None: locals = caller_locals() try: # Running in a package build return op(locals['metatab_doc']) except KeyError: # Running interactively in Jupyter package_name = None build_package_dir = None source_package = None if dr is None: dr = getcwd() for i, e in enumerate(walk_up(dr)): intr = set([DEFAULT_METATAB_FILE, LINES_METATAB_FILE, IPYNB_METATAB_FILE]) & set(e[2]) if intr: source_package = join(e[0], list(intr)[0]) p = op(source_package) package_name = p.find_first_value("Root.Name") if not package_name: raise PackageError("Source package in {} does not have root.Name term".format(e[0])) if PACKAGE_PREFIX in e[1]: build_package_dir = join(e[0], PACKAGE_PREFIX) break if i > 2: break if build_package_dir and package_name and exists(join(build_package_dir, package_name)): # Open the previously built package built_package = join(build_package_dir, package_name) try: return op(built_package) except RowGeneratorError as e: pass # Probably could not open the metadata file. if source_package: # Open the source package return op(source_package) raise PackageError("Failed to find package, either in locals() or above dir '{}' ".format(dr))
[ "def", "open_package", "(", "locals", "=", "None", ",", "dr", "=", "None", ")", ":", "if", "locals", "is", "None", ":", "locals", "=", "caller_locals", "(", ")", "try", ":", "# Running in a package build", "return", "op", "(", "locals", "[", "'metatab_doc'...
Try to open a package with the metatab_doc variable, which is set when a Notebook is run as a resource. If that does not exist, try the local _packages directory
[ "Try", "to", "open", "a", "package", "with", "the", "metatab_doc", "variable", "which", "is", "set", "when", "a", "Notebook", "is", "run", "as", "a", "resource", ".", "If", "that", "does", "not", "exist", "try", "the", "local", "_packages", "directory" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/jupyter/ipython.py#L48-L103
train
50,564
Metatab/metapack
metapack/jupyter/ipython.py
rebuild_schema
def rebuild_schema(doc, r, df): """Rebuild the schema for a resource based on a dataframe""" import numpy as np # Re-get the resource in the doc, since it may be different. try: r = doc.resource(r.name) except AttributeError: # Maybe r is actually a resource name r = doc.resource(r) def alt_col_name(name, i): import re if not name: return 'col{}'.format(i) return re.sub('_+', '_', re.sub('[^\w_]', '_', str(name)).lower()).rstrip('_') df_types = { np.dtype('O'): 'text', np.dtype('int64'): 'integer', np.dtype('float64'): 'number' } try: df_index_frame = df.index.to_frame() except AttributeError: df_index_frame = None def get_col_dtype(c): c = str(c) try: return df_types[df[c].dtype] except KeyError: # Maybe it is in the index? pass try: return df_types[df_index_frame[c].dtype] except TypeError: # Maybe not a multi-index pass if c == 'id' or c == df.index.name: return df_types[df.index.dtype] return 'unknown' columns = [] schema_term = r.schema_term[0] if schema_term: old_cols = {c['name'].value: c.properties for c in schema_term.children} for c in schema_term.children: schema_term.remove_child(c) schema_term.children = [] else: old_cols = {} schema_term = doc['Schema'].new_term('Table', r.schema_name) index_names = [n if n else "id" for n in df.index.names] for i, col in enumerate(index_names + list(df.columns)): acn = alt_col_name(col, i) if alt_col_name(col, i) != str(col) else '' d = {'name': col, 'datatype': get_col_dtype(col), 'altname': acn} if col in old_cols.keys(): lookup_name = col elif acn in old_cols.keys(): lookup_name = acn else: lookup_name = None if lookup_name and lookup_name in old_cols: for k, v in schema_term.properties.items(): old_col = old_cols.get(lookup_name) for k, v in old_col.items(): if k != 'name' and v: d[k] = v columns.append(d) for c in columns: name = c['name'] del c['name'] datatype = c['datatype'] del c['datatype'] altname = c['altname'] del c['altname'] schema_term.new_child('Column', name, datatype=datatype, altname=altname, **c)
python
def rebuild_schema(doc, r, df): """Rebuild the schema for a resource based on a dataframe""" import numpy as np # Re-get the resource in the doc, since it may be different. try: r = doc.resource(r.name) except AttributeError: # Maybe r is actually a resource name r = doc.resource(r) def alt_col_name(name, i): import re if not name: return 'col{}'.format(i) return re.sub('_+', '_', re.sub('[^\w_]', '_', str(name)).lower()).rstrip('_') df_types = { np.dtype('O'): 'text', np.dtype('int64'): 'integer', np.dtype('float64'): 'number' } try: df_index_frame = df.index.to_frame() except AttributeError: df_index_frame = None def get_col_dtype(c): c = str(c) try: return df_types[df[c].dtype] except KeyError: # Maybe it is in the index? pass try: return df_types[df_index_frame[c].dtype] except TypeError: # Maybe not a multi-index pass if c == 'id' or c == df.index.name: return df_types[df.index.dtype] return 'unknown' columns = [] schema_term = r.schema_term[0] if schema_term: old_cols = {c['name'].value: c.properties for c in schema_term.children} for c in schema_term.children: schema_term.remove_child(c) schema_term.children = [] else: old_cols = {} schema_term = doc['Schema'].new_term('Table', r.schema_name) index_names = [n if n else "id" for n in df.index.names] for i, col in enumerate(index_names + list(df.columns)): acn = alt_col_name(col, i) if alt_col_name(col, i) != str(col) else '' d = {'name': col, 'datatype': get_col_dtype(col), 'altname': acn} if col in old_cols.keys(): lookup_name = col elif acn in old_cols.keys(): lookup_name = acn else: lookup_name = None if lookup_name and lookup_name in old_cols: for k, v in schema_term.properties.items(): old_col = old_cols.get(lookup_name) for k, v in old_col.items(): if k != 'name' and v: d[k] = v columns.append(d) for c in columns: name = c['name'] del c['name'] datatype = c['datatype'] del c['datatype'] altname = c['altname'] del c['altname'] schema_term.new_child('Column', name, datatype=datatype, altname=altname, **c)
[ "def", "rebuild_schema", "(", "doc", ",", "r", ",", "df", ")", ":", "import", "numpy", "as", "np", "# Re-get the resource in the doc, since it may be different.", "try", ":", "r", "=", "doc", ".", "resource", "(", "r", ".", "name", ")", "except", "AttributeErr...
Rebuild the schema for a resource based on a dataframe
[ "Rebuild", "the", "schema", "for", "a", "resource", "based", "on", "a", "dataframe" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/jupyter/ipython.py#L106-L206
train
50,565
Metatab/metapack
metapack/jupyter/ipython.py
rewrite_schema
def rewrite_schema(r, df, doc=None): """Rebuild the schema for a resource based on a dataframe and re-write the doc""" from metapack.cli.core import write_doc if doc is None: doc = open_source_package() rebuild_schema(doc, r, df) write_doc(doc, doc.ref)
python
def rewrite_schema(r, df, doc=None): """Rebuild the schema for a resource based on a dataframe and re-write the doc""" from metapack.cli.core import write_doc if doc is None: doc = open_source_package() rebuild_schema(doc, r, df) write_doc(doc, doc.ref)
[ "def", "rewrite_schema", "(", "r", ",", "df", ",", "doc", "=", "None", ")", ":", "from", "metapack", ".", "cli", ".", "core", "import", "write_doc", "if", "doc", "is", "None", ":", "doc", "=", "open_source_package", "(", ")", "rebuild_schema", "(", "do...
Rebuild the schema for a resource based on a dataframe and re-write the doc
[ "Rebuild", "the", "schema", "for", "a", "resource", "based", "on", "a", "dataframe", "and", "re", "-", "write", "the", "doc" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/jupyter/ipython.py#L209-L219
train
50,566
Metatab/metapack
metapack/jupyter/ipython.py
interactive_rewrite_schema
def interactive_rewrite_schema(r, df, doc=None): """Rebuild the schema for a resource based on a dataframe and re-write the doc, but only if running the notebook interactively, not while building""" if 'metatab_doc' in caller_locals(): return False if doc is None: doc = open_source_package() rewrite_schema(r, df, doc) return True
python
def interactive_rewrite_schema(r, df, doc=None): """Rebuild the schema for a resource based on a dataframe and re-write the doc, but only if running the notebook interactively, not while building""" if 'metatab_doc' in caller_locals(): return False if doc is None: doc = open_source_package() rewrite_schema(r, df, doc) return True
[ "def", "interactive_rewrite_schema", "(", "r", ",", "df", ",", "doc", "=", "None", ")", ":", "if", "'metatab_doc'", "in", "caller_locals", "(", ")", ":", "return", "False", "if", "doc", "is", "None", ":", "doc", "=", "open_source_package", "(", ")", "rew...
Rebuild the schema for a resource based on a dataframe and re-write the doc, but only if running the notebook interactively, not while building
[ "Rebuild", "the", "schema", "for", "a", "resource", "based", "on", "a", "dataframe", "and", "re", "-", "write", "the", "doc", "but", "only", "if", "running", "the", "notebook", "interactively", "not", "while", "building" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/jupyter/ipython.py#L222-L234
train
50,567
Metatab/metapack
metapack/jupyter/ipython.py
get_dataframes
def get_dataframes(): """Yield tuples of dataframe variable name and the dataframe. Skips variables with names that start with an underscore""" for k, v in caller_locals().items(): if k.startswith('_'): continue if isinstance(v, pd.core.frame.DataFrame): yield k, v
python
def get_dataframes(): """Yield tuples of dataframe variable name and the dataframe. Skips variables with names that start with an underscore""" for k, v in caller_locals().items(): if k.startswith('_'): continue if isinstance(v, pd.core.frame.DataFrame): yield k, v
[ "def", "get_dataframes", "(", ")", ":", "for", "k", ",", "v", "in", "caller_locals", "(", ")", ".", "items", "(", ")", ":", "if", "k", ".", "startswith", "(", "'_'", ")", ":", "continue", "if", "isinstance", "(", "v", ",", "pd", ".", "core", ".",...
Yield tuples of dataframe variable name and the dataframe. Skips variables with names that start with an underscore
[ "Yield", "tuples", "of", "dataframe", "variable", "name", "and", "the", "dataframe", ".", "Skips", "variables", "with", "names", "that", "start", "with", "an", "underscore" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/jupyter/ipython.py#L237-L246
train
50,568
Metatab/metapack
metapack/jupyter/ipython.py
get_notebook_rel_path
def get_notebook_rel_path(pkg=None): """Get the path of a notebook, relative to the current soruce package""" pkg = pkg or open_source_package() pkg_path = str(pkg.package_url.fspath) nb_path = get_notebook_path() return nb_path.replace(pkg_path, '').strip('/')
python
def get_notebook_rel_path(pkg=None): """Get the path of a notebook, relative to the current soruce package""" pkg = pkg or open_source_package() pkg_path = str(pkg.package_url.fspath) nb_path = get_notebook_path() return nb_path.replace(pkg_path, '').strip('/')
[ "def", "get_notebook_rel_path", "(", "pkg", "=", "None", ")", ":", "pkg", "=", "pkg", "or", "open_source_package", "(", ")", "pkg_path", "=", "str", "(", "pkg", ".", "package_url", ".", "fspath", ")", "nb_path", "=", "get_notebook_path", "(", ")", "return"...
Get the path of a notebook, relative to the current soruce package
[ "Get", "the", "path", "of", "a", "notebook", "relative", "to", "the", "current", "soruce", "package" ]
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/jupyter/ipython.py#L273-L279
train
50,569
Metatab/metapack
metapack/jupyter/ipython.py
add_dataframe
def add_dataframe(df, name, pkg=None, description=''): """Add a dataframe to a source package. Pass in either the name of the dataframe, or the dataframe. If the dataframeis passed it, the name will be the dataframe's variable name. The function will re-write the source package with the new resource. """ from warnings import warn from metapack.cli.core import alt_col_name, type_map import numpy as np if name is None or df is None: warn("Did not find dataframe for reference '{}' ".format(ref)) return pkg = pkg or open_source_package() resource_ref = 'file:' + get_notebook_rel_path(pkg) + '#' + name t = pkg.find_first('Root.Datafile', value=resource_ref) col_props = {} if t: print("Datafile exists for url '{}', deleting".format(resource_ref)) if t.schema_term: col_props = { c['name']:c for c in t.columns()} pkg.remove_term(t.schema_term) pkg.remove_term(t) t = pkg['Resources'].new_term('Root.Datafile', resource_ref, name=name, description=description) st = pkg['Schema'].new_term('Table', t.schema_name, description=description) for i, name in enumerate(df.columns): props = col_props.get(name,{}) try: native_type = type(np.asscalar(df[name].dtype.type(0))).__name__ except ValueError: native_type = df[name].dtype.name except AttributeError: native_type = type(df[name][0]).__name__ for pn in 'datatype name pos header'.split(): if pn in props: del props[pn] if 'altname' in props: altname = props['altname'] del props['altname'] else: raw_alt_name = alt_col_name(name, i) altname = raw_alt_name if raw_alt_name != name else '' col = df[name] if hasattr(col, 'description'): # custom property props['description'] = col.description t = st.new_child('Column', name, datatype=type_map.get(native_type, native_type), altname=altname, **props) pkg.write_csv()
python
def add_dataframe(df, name, pkg=None, description=''): """Add a dataframe to a source package. Pass in either the name of the dataframe, or the dataframe. If the dataframeis passed it, the name will be the dataframe's variable name. The function will re-write the source package with the new resource. """ from warnings import warn from metapack.cli.core import alt_col_name, type_map import numpy as np if name is None or df is None: warn("Did not find dataframe for reference '{}' ".format(ref)) return pkg = pkg or open_source_package() resource_ref = 'file:' + get_notebook_rel_path(pkg) + '#' + name t = pkg.find_first('Root.Datafile', value=resource_ref) col_props = {} if t: print("Datafile exists for url '{}', deleting".format(resource_ref)) if t.schema_term: col_props = { c['name']:c for c in t.columns()} pkg.remove_term(t.schema_term) pkg.remove_term(t) t = pkg['Resources'].new_term('Root.Datafile', resource_ref, name=name, description=description) st = pkg['Schema'].new_term('Table', t.schema_name, description=description) for i, name in enumerate(df.columns): props = col_props.get(name,{}) try: native_type = type(np.asscalar(df[name].dtype.type(0))).__name__ except ValueError: native_type = df[name].dtype.name except AttributeError: native_type = type(df[name][0]).__name__ for pn in 'datatype name pos header'.split(): if pn in props: del props[pn] if 'altname' in props: altname = props['altname'] del props['altname'] else: raw_alt_name = alt_col_name(name, i) altname = raw_alt_name if raw_alt_name != name else '' col = df[name] if hasattr(col, 'description'): # custom property props['description'] = col.description t = st.new_child('Column', name, datatype=type_map.get(native_type, native_type), altname=altname, **props) pkg.write_csv()
[ "def", "add_dataframe", "(", "df", ",", "name", ",", "pkg", "=", "None", ",", "description", "=", "''", ")", ":", "from", "warnings", "import", "warn", "from", "metapack", ".", "cli", ".", "core", "import", "alt_col_name", ",", "type_map", "import", "num...
Add a dataframe to a source package. Pass in either the name of the dataframe, or the dataframe. If the dataframeis passed it, the name will be the dataframe's variable name. The function will re-write the source package with the new resource.
[ "Add", "a", "dataframe", "to", "a", "source", "package", ".", "Pass", "in", "either", "the", "name", "of", "the", "dataframe", "or", "the", "dataframe", ".", "If", "the", "dataframeis", "passed", "it", "the", "name", "will", "be", "the", "dataframe", "s"...
8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6
https://github.com/Metatab/metapack/blob/8365f221fbeaa3c0be9091f2eaf3447fd8e2e8d6/metapack/jupyter/ipython.py#L282-L349
train
50,570
project-rig/rig
rig/utils/docstrings.py
add_int_enums_to_docstring
def add_int_enums_to_docstring(enum): """Decorator for IntEnum which re-writes the documentation string so that Sphinx enumerates all the enumeration values. This is a work-around for Sphinx autodoc's inability to properly document IntEnums. This decorator adds enumeration names and values to the 'Attributes' section of the docstring of the decorated IntEnum class. Example:: >>> from enum import IntEnum >>> @add_int_enums_to_docstring ... class MyIntEnum(IntEnum): ... '''An example IntEnum.''' ... a = 0 ... b = 1 >>> print(MyIntEnum.__doc__) An example IntEnum. <BLANKLINE> Attributes ---------- a = 0 b = 1 <BLANKLINE> """ # The enum34 library (used for compatibility with Python < v3.4) rather # oddly set its docstring to None rather than some senible but empty # default... if enum.__doc__ is None: # pragma: nocover enum.__doc__ = "" enum.__doc__ += ("\n\n" "Attributes\n" "----------\n") for val in list(enum): enum.__doc__ += "{} = {}\n".format(val.name, int(val)) return enum
python
def add_int_enums_to_docstring(enum): """Decorator for IntEnum which re-writes the documentation string so that Sphinx enumerates all the enumeration values. This is a work-around for Sphinx autodoc's inability to properly document IntEnums. This decorator adds enumeration names and values to the 'Attributes' section of the docstring of the decorated IntEnum class. Example:: >>> from enum import IntEnum >>> @add_int_enums_to_docstring ... class MyIntEnum(IntEnum): ... '''An example IntEnum.''' ... a = 0 ... b = 1 >>> print(MyIntEnum.__doc__) An example IntEnum. <BLANKLINE> Attributes ---------- a = 0 b = 1 <BLANKLINE> """ # The enum34 library (used for compatibility with Python < v3.4) rather # oddly set its docstring to None rather than some senible but empty # default... if enum.__doc__ is None: # pragma: nocover enum.__doc__ = "" enum.__doc__ += ("\n\n" "Attributes\n" "----------\n") for val in list(enum): enum.__doc__ += "{} = {}\n".format(val.name, int(val)) return enum
[ "def", "add_int_enums_to_docstring", "(", "enum", ")", ":", "# The enum34 library (used for compatibility with Python < v3.4) rather", "# oddly set its docstring to None rather than some senible but empty", "# default...", "if", "enum", ".", "__doc__", "is", "None", ":", "# pragma: n...
Decorator for IntEnum which re-writes the documentation string so that Sphinx enumerates all the enumeration values. This is a work-around for Sphinx autodoc's inability to properly document IntEnums. This decorator adds enumeration names and values to the 'Attributes' section of the docstring of the decorated IntEnum class. Example:: >>> from enum import IntEnum >>> @add_int_enums_to_docstring ... class MyIntEnum(IntEnum): ... '''An example IntEnum.''' ... a = 0 ... b = 1 >>> print(MyIntEnum.__doc__) An example IntEnum. <BLANKLINE> Attributes ---------- a = 0 b = 1 <BLANKLINE>
[ "Decorator", "for", "IntEnum", "which", "re", "-", "writes", "the", "documentation", "string", "so", "that", "Sphinx", "enumerates", "all", "the", "enumeration", "values", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/utils/docstrings.py#L12-L51
train
50,571
project-rig/rig
rig/utils/docstrings.py
add_signature_to_docstring
def add_signature_to_docstring(f, include_self=False, kw_only_args={}): """Decorator which adds the function signature of 'f' to the decorated function's docstring. Under Python 2, wrapping a function (even using functools.wraps) hides its signature to Sphinx's introspection tools so it is necessary to include the function signature in the docstring to enable Sphinx to render it correctly. Additionally, when building decorators which change a function's signature, it is non-trivial modify the wrapper's function signature and so automatically generated documentation will not display the correct signature. This decorator can aid in the specific case where a wrapper adds keyword-only arguments to the set of arguments accepted by the underlying function. For example:: >>> def my_func(a, b=0, *args, **kwargs): ... '''An example function.''' ... pass >>> import functools >>> @add_signature_to_docstring(my_func, kw_only_args={"c": 1}) ... @functools.wraps(my_func) ... def my_func_wrapper(*args, **kwargs): ... c = kwargs.pop("c") ... # ...do something with c... ... return my_func(*args, **kwargs) >>> print(my_func_wrapper.__doc__) my_func(a, b=0, *args, c=1, **kwargs) An example function. .. warning:: This function only works with functions which do not have any named keyword-only arguments. For example this function cannot be handled:: def f(*args, kw_only_arg=123) This is due to a limitation in the underlying introspection library provided in Python 2. Parameters ---------- f : function The function whose signature will be used. Need not be the same as the decorated function. include_self : bool Should an initial 'self' arguments be included in the signature? (These are assumed to be arguments called 'self' without a default value). kw_only_args : dict Optionally, add a set of keyword-only arguments to the function signature. This is useful if the wrapper function adds new keyword-only arguments. 
""" def decorate(f_wrapper): args, varargs, keywords, defaults = inspect.getargspec(f) # Simplifies later logic if defaults is None: defaults = [] # Make sure the keyword only arguments don't use the names of any other # arguments assert set(args).isdisjoint(set(kw_only_args)) assert varargs is None or varargs not in kw_only_args assert keywords is None or keywords not in kw_only_args # If required, remove the initial 'self' argument (e.g. for methods) if not include_self: if (len(args) >= 1 and args[0] == "self" and len(args) > len(defaults)): args.pop(0) # Assemble a string representation of the signature. This must be done # by hand (rather than using formatargspec) to allow the assembly of # signatures with keyword-only values. signature = "{}(".format(f_wrapper.__name__) for arg in args[:-len(defaults)] if defaults else args: signature += "{}, ".format(arg) for arg, default in zip(args[-len(defaults):], defaults): signature += "{}={}, ".format(arg, repr(default)) if kw_only_args or varargs is not None: # Must include a varargs name if keyword only arguments are # supplied. if varargs is None and kw_only_args: assert "_" not in args assert "_" not in kw_only_args assert "_" != keywords signature += "*_, " else: signature += "*{}, ".format(varargs) for keyword, default in iteritems(kw_only_args): signature += "{}={}, ".format(keyword, default) if keywords is not None: signature += "**{}, ".format(keywords) signature = "{})".format(signature.rstrip(", ")) # Only add the signature if one is not already present. if f_wrapper.__doc__ is None: f_wrapper.__doc__ = signature elif not f_wrapper.__doc__.lstrip().startswith( "{}(".format(f_wrapper.__name__)): f_wrapper.__doc__ = "{}\n{}".format(signature, f_wrapper.__doc__) # Return the original function (after modifying its __doc__) return f_wrapper return decorate
python
def add_signature_to_docstring(f, include_self=False, kw_only_args={}): """Decorator which adds the function signature of 'f' to the decorated function's docstring. Under Python 2, wrapping a function (even using functools.wraps) hides its signature to Sphinx's introspection tools so it is necessary to include the function signature in the docstring to enable Sphinx to render it correctly. Additionally, when building decorators which change a function's signature, it is non-trivial modify the wrapper's function signature and so automatically generated documentation will not display the correct signature. This decorator can aid in the specific case where a wrapper adds keyword-only arguments to the set of arguments accepted by the underlying function. For example:: >>> def my_func(a, b=0, *args, **kwargs): ... '''An example function.''' ... pass >>> import functools >>> @add_signature_to_docstring(my_func, kw_only_args={"c": 1}) ... @functools.wraps(my_func) ... def my_func_wrapper(*args, **kwargs): ... c = kwargs.pop("c") ... # ...do something with c... ... return my_func(*args, **kwargs) >>> print(my_func_wrapper.__doc__) my_func(a, b=0, *args, c=1, **kwargs) An example function. .. warning:: This function only works with functions which do not have any named keyword-only arguments. For example this function cannot be handled:: def f(*args, kw_only_arg=123) This is due to a limitation in the underlying introspection library provided in Python 2. Parameters ---------- f : function The function whose signature will be used. Need not be the same as the decorated function. include_self : bool Should an initial 'self' arguments be included in the signature? (These are assumed to be arguments called 'self' without a default value). kw_only_args : dict Optionally, add a set of keyword-only arguments to the function signature. This is useful if the wrapper function adds new keyword-only arguments. 
""" def decorate(f_wrapper): args, varargs, keywords, defaults = inspect.getargspec(f) # Simplifies later logic if defaults is None: defaults = [] # Make sure the keyword only arguments don't use the names of any other # arguments assert set(args).isdisjoint(set(kw_only_args)) assert varargs is None or varargs not in kw_only_args assert keywords is None or keywords not in kw_only_args # If required, remove the initial 'self' argument (e.g. for methods) if not include_self: if (len(args) >= 1 and args[0] == "self" and len(args) > len(defaults)): args.pop(0) # Assemble a string representation of the signature. This must be done # by hand (rather than using formatargspec) to allow the assembly of # signatures with keyword-only values. signature = "{}(".format(f_wrapper.__name__) for arg in args[:-len(defaults)] if defaults else args: signature += "{}, ".format(arg) for arg, default in zip(args[-len(defaults):], defaults): signature += "{}={}, ".format(arg, repr(default)) if kw_only_args or varargs is not None: # Must include a varargs name if keyword only arguments are # supplied. if varargs is None and kw_only_args: assert "_" not in args assert "_" not in kw_only_args assert "_" != keywords signature += "*_, " else: signature += "*{}, ".format(varargs) for keyword, default in iteritems(kw_only_args): signature += "{}={}, ".format(keyword, default) if keywords is not None: signature += "**{}, ".format(keywords) signature = "{})".format(signature.rstrip(", ")) # Only add the signature if one is not already present. if f_wrapper.__doc__ is None: f_wrapper.__doc__ = signature elif not f_wrapper.__doc__.lstrip().startswith( "{}(".format(f_wrapper.__name__)): f_wrapper.__doc__ = "{}\n{}".format(signature, f_wrapper.__doc__) # Return the original function (after modifying its __doc__) return f_wrapper return decorate
[ "def", "add_signature_to_docstring", "(", "f", ",", "include_self", "=", "False", ",", "kw_only_args", "=", "{", "}", ")", ":", "def", "decorate", "(", "f_wrapper", ")", ":", "args", ",", "varargs", ",", "keywords", ",", "defaults", "=", "inspect", ".", ...
Decorator which adds the function signature of 'f' to the decorated function's docstring. Under Python 2, wrapping a function (even using functools.wraps) hides its signature to Sphinx's introspection tools so it is necessary to include the function signature in the docstring to enable Sphinx to render it correctly. Additionally, when building decorators which change a function's signature, it is non-trivial modify the wrapper's function signature and so automatically generated documentation will not display the correct signature. This decorator can aid in the specific case where a wrapper adds keyword-only arguments to the set of arguments accepted by the underlying function. For example:: >>> def my_func(a, b=0, *args, **kwargs): ... '''An example function.''' ... pass >>> import functools >>> @add_signature_to_docstring(my_func, kw_only_args={"c": 1}) ... @functools.wraps(my_func) ... def my_func_wrapper(*args, **kwargs): ... c = kwargs.pop("c") ... # ...do something with c... ... return my_func(*args, **kwargs) >>> print(my_func_wrapper.__doc__) my_func(a, b=0, *args, c=1, **kwargs) An example function. .. warning:: This function only works with functions which do not have any named keyword-only arguments. For example this function cannot be handled:: def f(*args, kw_only_arg=123) This is due to a limitation in the underlying introspection library provided in Python 2. Parameters ---------- f : function The function whose signature will be used. Need not be the same as the decorated function. include_self : bool Should an initial 'self' arguments be included in the signature? (These are assumed to be arguments called 'self' without a default value). kw_only_args : dict Optionally, add a set of keyword-only arguments to the function signature. This is useful if the wrapper function adds new keyword-only arguments.
[ "Decorator", "which", "adds", "the", "function", "signature", "of", "f", "to", "the", "decorated", "function", "s", "docstring", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/utils/docstrings.py#L54-L165
train
50,572
project-rig/rig
rig/machine_control/machine_controller.py
_if_not_closed
def _if_not_closed(f): """Run the method iff. the memory view hasn't been closed and the parent object has not been freed.""" @add_signature_to_docstring(f) @functools.wraps(f) def f_(self, *args, **kwargs): if self.closed or self._parent._freed: raise OSError return f(self, *args, **kwargs) return f_
python
def _if_not_closed(f): """Run the method iff. the memory view hasn't been closed and the parent object has not been freed.""" @add_signature_to_docstring(f) @functools.wraps(f) def f_(self, *args, **kwargs): if self.closed or self._parent._freed: raise OSError return f(self, *args, **kwargs) return f_
[ "def", "_if_not_closed", "(", "f", ")", ":", "@", "add_signature_to_docstring", "(", "f", ")", "@", "functools", ".", "wraps", "(", "f", ")", "def", "f_", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "closed"...
Run the method iff. the memory view hasn't been closed and the parent object has not been freed.
[ "Run", "the", "method", "iff", ".", "the", "memory", "view", "hasn", "t", "been", "closed", "and", "the", "parent", "object", "has", "not", "been", "freed", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/machine_control/machine_controller.py#L2482-L2492
train
50,573
project-rig/rig
rig/machine_control/machine_controller.py
_if_not_freed
def _if_not_freed(f): """Run the method iff. the memory view hasn't been closed.""" @add_signature_to_docstring(f) @functools.wraps(f) def f_(self, *args, **kwargs): if self._freed: raise OSError return f(self, *args, **kwargs) return f_
python
def _if_not_freed(f): """Run the method iff. the memory view hasn't been closed.""" @add_signature_to_docstring(f) @functools.wraps(f) def f_(self, *args, **kwargs): if self._freed: raise OSError return f(self, *args, **kwargs) return f_
[ "def", "_if_not_freed", "(", "f", ")", ":", "@", "add_signature_to_docstring", "(", "f", ")", "@", "functools", ".", "wraps", "(", "f", ")", "def", "f_", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "_freed",...
Run the method iff. the memory view hasn't been closed.
[ "Run", "the", "method", "iff", ".", "the", "memory", "view", "hasn", "t", "been", "closed", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/machine_control/machine_controller.py#L2738-L2747
train
50,574
project-rig/rig
rig/machine_control/machine_controller.py
unpack_routing_table_entry
def unpack_routing_table_entry(packed): """Unpack a routing table entry read from a SpiNNaker machine. Parameters ---------- packet : :py:class:`bytes` Bytes containing a packed routing table. Returns ------- (:py:class:`~rig.routing_table.RoutingTableEntry`, app_id, core) or None Tuple containing the routing entry, the app_id associated with the entry and the core number associated with the entry; or None if the routing table entry is flagged as unused. """ # Unpack the routing table entry _, free, route, key, mask = struct.unpack(consts.RTE_PACK_STRING, packed) # If the top 8 bits of the route are set then this entry is not in use, so # return None. if route & 0xff000000 == 0xff000000: return None # Convert the routing table entry routes = {r for r in routing_table.Routes if (route >> r) & 0x1} rte = routing_table.RoutingTableEntry(routes, key, mask) # Convert the surrounding data app_id = free & 0xff core = (free >> 8) & 0x0f return (rte, app_id, core)
python
def unpack_routing_table_entry(packed): """Unpack a routing table entry read from a SpiNNaker machine. Parameters ---------- packet : :py:class:`bytes` Bytes containing a packed routing table. Returns ------- (:py:class:`~rig.routing_table.RoutingTableEntry`, app_id, core) or None Tuple containing the routing entry, the app_id associated with the entry and the core number associated with the entry; or None if the routing table entry is flagged as unused. """ # Unpack the routing table entry _, free, route, key, mask = struct.unpack(consts.RTE_PACK_STRING, packed) # If the top 8 bits of the route are set then this entry is not in use, so # return None. if route & 0xff000000 == 0xff000000: return None # Convert the routing table entry routes = {r for r in routing_table.Routes if (route >> r) & 0x1} rte = routing_table.RoutingTableEntry(routes, key, mask) # Convert the surrounding data app_id = free & 0xff core = (free >> 8) & 0x0f return (rte, app_id, core)
[ "def", "unpack_routing_table_entry", "(", "packed", ")", ":", "# Unpack the routing table entry", "_", ",", "free", ",", "route", ",", "key", ",", "mask", "=", "struct", ".", "unpack", "(", "consts", ".", "RTE_PACK_STRING", ",", "packed", ")", "# If the top 8 bi...
Unpack a routing table entry read from a SpiNNaker machine. Parameters ---------- packet : :py:class:`bytes` Bytes containing a packed routing table. Returns ------- (:py:class:`~rig.routing_table.RoutingTableEntry`, app_id, core) or None Tuple containing the routing entry, the app_id associated with the entry and the core number associated with the entry; or None if the routing table entry is flagged as unused.
[ "Unpack", "a", "routing", "table", "entry", "read", "from", "a", "SpiNNaker", "machine", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/machine_control/machine_controller.py#L2838-L2869
train
50,575
project-rig/rig
rig/machine_control/machine_controller.py
MachineController.send_scp
def send_scp(self, *args, **kwargs): """Transmit an SCP Packet and return the response. This function is a thin wrapper around :py:meth:`rig.machine_control.scp_connection.SCPConnection.send_scp`. This function will attempt to use the SCP connection nearest the destination of the SCP command if multiple connections have been discovered using :py:meth:`.discover_connections`. Parameters ---------- x : int y : int p : int *args **kwargs """ # Retrieve contextual arguments from the keyword arguments. The # context system ensures that these values are present. x = kwargs.pop("x") y = kwargs.pop("y") p = kwargs.pop("p") return self._send_scp(x, y, p, *args, **kwargs)
python
def send_scp(self, *args, **kwargs): """Transmit an SCP Packet and return the response. This function is a thin wrapper around :py:meth:`rig.machine_control.scp_connection.SCPConnection.send_scp`. This function will attempt to use the SCP connection nearest the destination of the SCP command if multiple connections have been discovered using :py:meth:`.discover_connections`. Parameters ---------- x : int y : int p : int *args **kwargs """ # Retrieve contextual arguments from the keyword arguments. The # context system ensures that these values are present. x = kwargs.pop("x") y = kwargs.pop("y") p = kwargs.pop("p") return self._send_scp(x, y, p, *args, **kwargs)
[ "def", "send_scp", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# Retrieve contextual arguments from the keyword arguments. The", "# context system ensures that these values are present.", "x", "=", "kwargs", ".", "pop", "(", "\"x\"", ")", "y", ...
Transmit an SCP Packet and return the response. This function is a thin wrapper around :py:meth:`rig.machine_control.scp_connection.SCPConnection.send_scp`. This function will attempt to use the SCP connection nearest the destination of the SCP command if multiple connections have been discovered using :py:meth:`.discover_connections`. Parameters ---------- x : int y : int p : int *args **kwargs
[ "Transmit", "an", "SCP", "Packet", "and", "return", "the", "response", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/machine_control/machine_controller.py#L182-L205
train
50,576
project-rig/rig
rig/machine_control/machine_controller.py
MachineController._get_connection
def _get_connection(self, x, y): """Get the appropriate connection for a chip.""" if (self._width is None or self._height is None or self._root_chip is None): return self.connections[None] else: # If possible, use the local Ethernet connected chip eth_chip = spinn5_local_eth_coord(x, y, self._width, self._height, *self._root_chip) conn = self.connections.get(eth_chip) if conn is not None: return conn else: # If no connection was available to the local board, choose # another arbitrarily. # XXX: This choice will cause lots of contention in systems # with many missing Ethernet connections. return self.connections[None]
python
def _get_connection(self, x, y): """Get the appropriate connection for a chip.""" if (self._width is None or self._height is None or self._root_chip is None): return self.connections[None] else: # If possible, use the local Ethernet connected chip eth_chip = spinn5_local_eth_coord(x, y, self._width, self._height, *self._root_chip) conn = self.connections.get(eth_chip) if conn is not None: return conn else: # If no connection was available to the local board, choose # another arbitrarily. # XXX: This choice will cause lots of contention in systems # with many missing Ethernet connections. return self.connections[None]
[ "def", "_get_connection", "(", "self", ",", "x", ",", "y", ")", ":", "if", "(", "self", ".", "_width", "is", "None", "or", "self", ".", "_height", "is", "None", "or", "self", ".", "_root_chip", "is", "None", ")", ":", "return", "self", ".", "connec...
Get the appropriate connection for a chip.
[ "Get", "the", "appropriate", "connection", "for", "a", "chip", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/machine_control/machine_controller.py#L207-L224
train
50,577
project-rig/rig
rig/machine_control/machine_controller.py
MachineController.boot
def boot(self, width=None, height=None, only_if_needed=True, check_booted=True, **boot_kwargs): """Boot a SpiNNaker machine. The system will be booted from the Ethernet connected chip whose hostname was given as the argument to the MachineController. With the default arguments this method will only boot systems which have not already been booted and will wait until machine is completely booted (and raise a :py:exc:`.SpiNNakerBootError` on failure). This method uses :py:func:`rig.machine_control.boot.boot` to send boot commands to the machine and update the struct files contained within this object according to those used during boot. .. warning:: Booting the system over the open internet is likely to fail due to the port number being blocked by most ISPs and UDP not being reliable. A proxy such as `spinnaker_proxy <https://github.com/project-rig/spinnaker_proxy>`_ may be useful in this situation. Parameters ---------- width, height : *Deprecated* **Deprecated.** In older versions of SC&MP, it was necessary to indicate the size of the machine being booted. These parameters are now ignored and setting them will produce a deprecation warning. scamp_binary : filename or None Filename of the binary to boot the machine with or None to use the SC&MP binary bundled with Rig. sark_struct : filename or None The 'sark.struct' file which defines the datastructures or None to use the one bundled with Rig. boot_delay : float Number of seconds to pause between sending boot data packets. post_boot_delay : float Number of seconds to wait after sending last piece of boot data to give SC&MP time to re-initialise the Ethernet interface. only_if_needed : bool If ``only_if_needed`` is True (the default), this method checks to see if the machine is already booted and only attempts to boot the machine if neccessary. If ``only_if_needed`` is False, the boot commands will be sent to the target machine without checking if it is already booted or not. .. 
warning:: If the machine has already been booted, sending the boot commands again will not 'reboot' the machine with the newly supplied boot image, even if ``only_if_needed`` is False. check_booted : bool If ``check_booted`` is True this method waits for the machine to be fully booted before returning. If False, this check is skipped and the function returns as soon as the machine's Ethernet interface is likely to be up (but not necessarily before booting has completed). sv_overrides : {name: value, ...} Additional arguments used to override the default values in the 'sv' struct defined in the struct file. Returns ------- bool Returns True if the machine was sent boot commands, False if the machine was already booted. Raises ------ rig.machine_control.machine_controller.SpiNNakerBootError Raised when ``check_booted`` is True and the boot process was unable to boot the machine. Also raised when ``only_if_needed`` is True and the remote host is a BMP. Notes ----- The constants `rig.machine_control.boot.spinX_boot_options` can be used to specify boot parameters, for example:: controller.boot(**spin3_boot_options) This is neccessary on boards such as SpiNN-3 boards if the more than LED 0 are required by an application since by default, only LED 0 is enabled. """ # Report deprecated width/height arguments if width is not None or height is not None: warnings.warn("Machine width and height are no longer needed when " "booting a machine.", DeprecationWarning) # Check to see if the machine is already booted first if only_if_needed: # We create a new MachineController which fails quickly if it # doesn't receieve a reply (since typically the machine is already # booted). quick_fail_mc = MachineController(self.initial_host, n_tries=1) try: info = quick_fail_mc.get_software_version(255, 255, 0) if "SpiNNaker" not in info.version_string: raise SpiNNakerBootError( "Remote host is not a SpiNNaker machine and so cannot " "be booted. 
(Are you using a BMP IP/hostname?)") # Machine did not need booting return False except SCPError: # The machine is not responding to SCP so it needs booting. pass # Actually boot the machine boot_kwargs.setdefault("boot_port", self.boot_port) self.structs = boot.boot(self.initial_host, **boot_kwargs) assert len(self.structs) > 0 # Wait for the machine to completely boot if check_booted: try: p2p_address = (255, 255) while p2p_address == (255, 255): time.sleep(0.1) p2p_address = self.get_software_version( 255, 255, 0).position except SCPError: # Machine did not respond raise SpiNNakerBootError( "The remote machine could not be booted.") # The machine was sent boot commands return True
python
def boot(self, width=None, height=None, only_if_needed=True, check_booted=True, **boot_kwargs): """Boot a SpiNNaker machine. The system will be booted from the Ethernet connected chip whose hostname was given as the argument to the MachineController. With the default arguments this method will only boot systems which have not already been booted and will wait until machine is completely booted (and raise a :py:exc:`.SpiNNakerBootError` on failure). This method uses :py:func:`rig.machine_control.boot.boot` to send boot commands to the machine and update the struct files contained within this object according to those used during boot. .. warning:: Booting the system over the open internet is likely to fail due to the port number being blocked by most ISPs and UDP not being reliable. A proxy such as `spinnaker_proxy <https://github.com/project-rig/spinnaker_proxy>`_ may be useful in this situation. Parameters ---------- width, height : *Deprecated* **Deprecated.** In older versions of SC&MP, it was necessary to indicate the size of the machine being booted. These parameters are now ignored and setting them will produce a deprecation warning. scamp_binary : filename or None Filename of the binary to boot the machine with or None to use the SC&MP binary bundled with Rig. sark_struct : filename or None The 'sark.struct' file which defines the datastructures or None to use the one bundled with Rig. boot_delay : float Number of seconds to pause between sending boot data packets. post_boot_delay : float Number of seconds to wait after sending last piece of boot data to give SC&MP time to re-initialise the Ethernet interface. only_if_needed : bool If ``only_if_needed`` is True (the default), this method checks to see if the machine is already booted and only attempts to boot the machine if neccessary. If ``only_if_needed`` is False, the boot commands will be sent to the target machine without checking if it is already booted or not. .. 
warning:: If the machine has already been booted, sending the boot commands again will not 'reboot' the machine with the newly supplied boot image, even if ``only_if_needed`` is False. check_booted : bool If ``check_booted`` is True this method waits for the machine to be fully booted before returning. If False, this check is skipped and the function returns as soon as the machine's Ethernet interface is likely to be up (but not necessarily before booting has completed). sv_overrides : {name: value, ...} Additional arguments used to override the default values in the 'sv' struct defined in the struct file. Returns ------- bool Returns True if the machine was sent boot commands, False if the machine was already booted. Raises ------ rig.machine_control.machine_controller.SpiNNakerBootError Raised when ``check_booted`` is True and the boot process was unable to boot the machine. Also raised when ``only_if_needed`` is True and the remote host is a BMP. Notes ----- The constants `rig.machine_control.boot.spinX_boot_options` can be used to specify boot parameters, for example:: controller.boot(**spin3_boot_options) This is neccessary on boards such as SpiNN-3 boards if the more than LED 0 are required by an application since by default, only LED 0 is enabled. """ # Report deprecated width/height arguments if width is not None or height is not None: warnings.warn("Machine width and height are no longer needed when " "booting a machine.", DeprecationWarning) # Check to see if the machine is already booted first if only_if_needed: # We create a new MachineController which fails quickly if it # doesn't receieve a reply (since typically the machine is already # booted). quick_fail_mc = MachineController(self.initial_host, n_tries=1) try: info = quick_fail_mc.get_software_version(255, 255, 0) if "SpiNNaker" not in info.version_string: raise SpiNNakerBootError( "Remote host is not a SpiNNaker machine and so cannot " "be booted. 
(Are you using a BMP IP/hostname?)") # Machine did not need booting return False except SCPError: # The machine is not responding to SCP so it needs booting. pass # Actually boot the machine boot_kwargs.setdefault("boot_port", self.boot_port) self.structs = boot.boot(self.initial_host, **boot_kwargs) assert len(self.structs) > 0 # Wait for the machine to completely boot if check_booted: try: p2p_address = (255, 255) while p2p_address == (255, 255): time.sleep(0.1) p2p_address = self.get_software_version( 255, 255, 0).position except SCPError: # Machine did not respond raise SpiNNakerBootError( "The remote machine could not be booted.") # The machine was sent boot commands return True
[ "def", "boot", "(", "self", ",", "width", "=", "None", ",", "height", "=", "None", ",", "only_if_needed", "=", "True", ",", "check_booted", "=", "True", ",", "*", "*", "boot_kwargs", ")", ":", "# Report deprecated width/height arguments", "if", "width", "is"...
Boot a SpiNNaker machine. The system will be booted from the Ethernet connected chip whose hostname was given as the argument to the MachineController. With the default arguments this method will only boot systems which have not already been booted and will wait until machine is completely booted (and raise a :py:exc:`.SpiNNakerBootError` on failure). This method uses :py:func:`rig.machine_control.boot.boot` to send boot commands to the machine and update the struct files contained within this object according to those used during boot. .. warning:: Booting the system over the open internet is likely to fail due to the port number being blocked by most ISPs and UDP not being reliable. A proxy such as `spinnaker_proxy <https://github.com/project-rig/spinnaker_proxy>`_ may be useful in this situation. Parameters ---------- width, height : *Deprecated* **Deprecated.** In older versions of SC&MP, it was necessary to indicate the size of the machine being booted. These parameters are now ignored and setting them will produce a deprecation warning. scamp_binary : filename or None Filename of the binary to boot the machine with or None to use the SC&MP binary bundled with Rig. sark_struct : filename or None The 'sark.struct' file which defines the datastructures or None to use the one bundled with Rig. boot_delay : float Number of seconds to pause between sending boot data packets. post_boot_delay : float Number of seconds to wait after sending last piece of boot data to give SC&MP time to re-initialise the Ethernet interface. only_if_needed : bool If ``only_if_needed`` is True (the default), this method checks to see if the machine is already booted and only attempts to boot the machine if neccessary. If ``only_if_needed`` is False, the boot commands will be sent to the target machine without checking if it is already booted or not. .. 
warning:: If the machine has already been booted, sending the boot commands again will not 'reboot' the machine with the newly supplied boot image, even if ``only_if_needed`` is False. check_booted : bool If ``check_booted`` is True this method waits for the machine to be fully booted before returning. If False, this check is skipped and the function returns as soon as the machine's Ethernet interface is likely to be up (but not necessarily before booting has completed). sv_overrides : {name: value, ...} Additional arguments used to override the default values in the 'sv' struct defined in the struct file. Returns ------- bool Returns True if the machine was sent boot commands, False if the machine was already booted. Raises ------ rig.machine_control.machine_controller.SpiNNakerBootError Raised when ``check_booted`` is True and the boot process was unable to boot the machine. Also raised when ``only_if_needed`` is True and the remote host is a BMP. Notes ----- The constants `rig.machine_control.boot.spinX_boot_options` can be used to specify boot parameters, for example:: controller.boot(**spin3_boot_options) This is neccessary on boards such as SpiNN-3 boards if the more than LED 0 are required by an application since by default, only LED 0 is enabled.
[ "Boot", "a", "SpiNNaker", "machine", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/machine_control/machine_controller.py#L248-L376
train
50,578
project-rig/rig
rig/machine_control/machine_controller.py
MachineController.discover_connections
def discover_connections(self, x=255, y=255): """Attempt to discover all available Ethernet connections to a machine. After calling this method, :py:class:`.MachineController` will attempt to communicate via the Ethernet connection on the same board as the destination chip for all commands. If called multiple times, existing connections will be retained in preference to new ones. .. note:: The system must be booted for this command to succeed. .. note:: Currently, only systems comprised of multiple Ethernet-connected SpiNN-5 boards are supported. Parameters ---------- x : int y : int (Optional) The coordinates of the chip to initially use to query the system for the set of live chips. Returns ------- int The number of new connections established. """ working_chips = set( (x, y) for (x, y), route in iteritems(self.get_p2p_routing_table(x, y)) if route != consts.P2PTableEntry.none) self._width = max(x for x, y in working_chips) + 1 self._height = max(y for x, y in working_chips) + 1 num_new_connections = 0 for x, y in spinn5_eth_coords(self._width, self._height, *self.root_chip): if (x, y) in working_chips and (x, y) not in self.connections: # Discover the chip's IP address try: ip = self.get_ip_address(x, y) except SCPError: continue if ip is not None: # Create a connection to the IP self.connections[(x, y)] = \ SCPConnection(ip, self.scp_port, self.n_tries, self.timeout) # Attempt to use the connection (and remove it if it # doesn't work) try: self.get_software_version(x, y, 0) num_new_connections += 1 except SCPError: self.connections.pop((x, y)).close() return num_new_connections
python
def discover_connections(self, x=255, y=255): """Attempt to discover all available Ethernet connections to a machine. After calling this method, :py:class:`.MachineController` will attempt to communicate via the Ethernet connection on the same board as the destination chip for all commands. If called multiple times, existing connections will be retained in preference to new ones. .. note:: The system must be booted for this command to succeed. .. note:: Currently, only systems comprised of multiple Ethernet-connected SpiNN-5 boards are supported. Parameters ---------- x : int y : int (Optional) The coordinates of the chip to initially use to query the system for the set of live chips. Returns ------- int The number of new connections established. """ working_chips = set( (x, y) for (x, y), route in iteritems(self.get_p2p_routing_table(x, y)) if route != consts.P2PTableEntry.none) self._width = max(x for x, y in working_chips) + 1 self._height = max(y for x, y in working_chips) + 1 num_new_connections = 0 for x, y in spinn5_eth_coords(self._width, self._height, *self.root_chip): if (x, y) in working_chips and (x, y) not in self.connections: # Discover the chip's IP address try: ip = self.get_ip_address(x, y) except SCPError: continue if ip is not None: # Create a connection to the IP self.connections[(x, y)] = \ SCPConnection(ip, self.scp_port, self.n_tries, self.timeout) # Attempt to use the connection (and remove it if it # doesn't work) try: self.get_software_version(x, y, 0) num_new_connections += 1 except SCPError: self.connections.pop((x, y)).close() return num_new_connections
[ "def", "discover_connections", "(", "self", ",", "x", "=", "255", ",", "y", "=", "255", ")", ":", "working_chips", "=", "set", "(", "(", "x", ",", "y", ")", "for", "(", "x", ",", "y", ")", ",", "route", "in", "iteritems", "(", "self", ".", "get...
Attempt to discover all available Ethernet connections to a machine. After calling this method, :py:class:`.MachineController` will attempt to communicate via the Ethernet connection on the same board as the destination chip for all commands. If called multiple times, existing connections will be retained in preference to new ones. .. note:: The system must be booted for this command to succeed. .. note:: Currently, only systems comprised of multiple Ethernet-connected SpiNN-5 boards are supported. Parameters ---------- x : int y : int (Optional) The coordinates of the chip to initially use to query the system for the set of live chips. Returns ------- int The number of new connections established.
[ "Attempt", "to", "discover", "all", "available", "Ethernet", "connections", "to", "a", "machine", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/machine_control/machine_controller.py#L379-L440
train
50,579
project-rig/rig
rig/machine_control/machine_controller.py
MachineController.application
def application(self, app_id): """Update the context to use the given application ID and stop the application when done. For example:: with cn.application(54): # All commands in this block will use app_id=54. # On leaving the block `cn.send_signal("stop", 54)` is # automatically called. """ # Get a new context and add a method that will be called before the # context is removed from the stack. context = self(app_id=app_id) context.before_close(lambda: self.send_signal("stop")) return context
python
def application(self, app_id): """Update the context to use the given application ID and stop the application when done. For example:: with cn.application(54): # All commands in this block will use app_id=54. # On leaving the block `cn.send_signal("stop", 54)` is # automatically called. """ # Get a new context and add a method that will be called before the # context is removed from the stack. context = self(app_id=app_id) context.before_close(lambda: self.send_signal("stop")) return context
[ "def", "application", "(", "self", ",", "app_id", ")", ":", "# Get a new context and add a method that will be called before the", "# context is removed from the stack.", "context", "=", "self", "(", "app_id", "=", "app_id", ")", "context", ".", "before_close", "(", "lamb...
Update the context to use the given application ID and stop the application when done. For example:: with cn.application(54): # All commands in this block will use app_id=54. # On leaving the block `cn.send_signal("stop", 54)` is # automatically called.
[ "Update", "the", "context", "to", "use", "the", "given", "application", "ID", "and", "stop", "the", "application", "when", "done", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/machine_control/machine_controller.py#L443-L458
train
50,580
project-rig/rig
rig/machine_control/machine_controller.py
MachineController.get_software_version
def get_software_version(self, x=255, y=255, processor=0): """Get the software version for a given SpiNNaker core. Returns ------- :py:class:`.CoreInfo` Information about the software running on a core. """ sver = self._send_scp(x, y, processor, SCPCommands.sver) # Format the result # arg1 => p2p address, physical cpu, virtual cpu p2p = sver.arg1 >> 16 p2p_address = (p2p >> 8, p2p & 0x00ff) pcpu = (sver.arg1 >> 8) & 0xff vcpu = sver.arg1 & 0xff # arg2 => version number (parsed separately) and buffer size buffer_size = (sver.arg2 & 0xffff) software_name, version, version_labels = \ unpack_sver_response_version(sver) return CoreInfo(p2p_address, pcpu, vcpu, version, buffer_size, sver.arg3, software_name, version_labels)
python
def get_software_version(self, x=255, y=255, processor=0): """Get the software version for a given SpiNNaker core. Returns ------- :py:class:`.CoreInfo` Information about the software running on a core. """ sver = self._send_scp(x, y, processor, SCPCommands.sver) # Format the result # arg1 => p2p address, physical cpu, virtual cpu p2p = sver.arg1 >> 16 p2p_address = (p2p >> 8, p2p & 0x00ff) pcpu = (sver.arg1 >> 8) & 0xff vcpu = sver.arg1 & 0xff # arg2 => version number (parsed separately) and buffer size buffer_size = (sver.arg2 & 0xffff) software_name, version, version_labels = \ unpack_sver_response_version(sver) return CoreInfo(p2p_address, pcpu, vcpu, version, buffer_size, sver.arg3, software_name, version_labels)
[ "def", "get_software_version", "(", "self", ",", "x", "=", "255", ",", "y", "=", "255", ",", "processor", "=", "0", ")", ":", "sver", "=", "self", ".", "_send_scp", "(", "x", ",", "y", ",", "processor", ",", "SCPCommands", ".", "sver", ")", "# Form...
Get the software version for a given SpiNNaker core. Returns ------- :py:class:`.CoreInfo` Information about the software running on a core.
[ "Get", "the", "software", "version", "for", "a", "given", "SpiNNaker", "core", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/machine_control/machine_controller.py#L461-L485
train
50,581
project-rig/rig
rig/machine_control/machine_controller.py
MachineController.get_ip_address
def get_ip_address(self, x, y): """Get the IP address of a particular SpiNNaker chip's Ethernet link. Returns ------- str or None The IPv4 address (as a string) of the chip's Ethernet link or None if the chip does not have an Ethernet connection or the link is currently down. """ chip_info = self.get_chip_info(x=x, y=y) return chip_info.ip_address if chip_info.ethernet_up else None
python
def get_ip_address(self, x, y): """Get the IP address of a particular SpiNNaker chip's Ethernet link. Returns ------- str or None The IPv4 address (as a string) of the chip's Ethernet link or None if the chip does not have an Ethernet connection or the link is currently down. """ chip_info = self.get_chip_info(x=x, y=y) return chip_info.ip_address if chip_info.ethernet_up else None
[ "def", "get_ip_address", "(", "self", ",", "x", ",", "y", ")", ":", "chip_info", "=", "self", ".", "get_chip_info", "(", "x", "=", "x", ",", "y", "=", "y", ")", "return", "chip_info", ".", "ip_address", "if", "chip_info", ".", "ethernet_up", "else", ...
Get the IP address of a particular SpiNNaker chip's Ethernet link. Returns ------- str or None The IPv4 address (as a string) of the chip's Ethernet link or None if the chip does not have an Ethernet connection or the link is currently down.
[ "Get", "the", "IP", "address", "of", "a", "particular", "SpiNNaker", "chip", "s", "Ethernet", "link", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/machine_control/machine_controller.py#L488-L499
train
50,582
project-rig/rig
rig/machine_control/machine_controller.py
MachineController.write_across_link
def write_across_link(self, address, data, x, y, link): """Write a bytestring to an address in memory on a neigbouring chip. .. warning:: This function is intended for low-level debug use only and is not optimised for performance nor intended for more general use. This method instructs a monitor processor to send 'POKE' nearest-neighbour packets to a neighbouring chip. These packets are handled directly by the SpiNNaker router in the neighbouring chip, potentially allowing advanced debug or recovery of a chip rendered otherwise unreachable. Parameters ---------- address : int The address at which to start writing the data. Only addresses in the system-wide address map may be accessed. Addresses must be word aligned. data : :py:class:`bytes` Data to write into memory. Must be a whole number of words in length. Large writes are automatically broken into a sequence of SCP link-write commands. x : int y : int The coordinates of the chip from which the command will be sent, *not* the coordinates of the chip on which the write will be performed. link : :py:class:`rig.links.Links` The link down which the write should be sent. """ if address % 4: raise ValueError("Addresses must be word-aligned.") if len(data) % 4: raise ValueError("Data must be a whole number of words.") length_bytes = len(data) cur_byte = 0 # Write the requested data, one SCP packet worth at a time while length_bytes > 0: to_write = min(length_bytes, (self.scp_data_length & ~0b11)) cur_data = data[cur_byte:cur_byte + to_write] self._send_scp(x, y, 0, SCPCommands.link_write, arg1=address, arg2=to_write, arg3=int(link), data=cur_data, expected_args=0) # Move to the next block to write address += to_write cur_byte += to_write length_bytes -= to_write
python
def write_across_link(self, address, data, x, y, link): """Write a bytestring to an address in memory on a neigbouring chip. .. warning:: This function is intended for low-level debug use only and is not optimised for performance nor intended for more general use. This method instructs a monitor processor to send 'POKE' nearest-neighbour packets to a neighbouring chip. These packets are handled directly by the SpiNNaker router in the neighbouring chip, potentially allowing advanced debug or recovery of a chip rendered otherwise unreachable. Parameters ---------- address : int The address at which to start writing the data. Only addresses in the system-wide address map may be accessed. Addresses must be word aligned. data : :py:class:`bytes` Data to write into memory. Must be a whole number of words in length. Large writes are automatically broken into a sequence of SCP link-write commands. x : int y : int The coordinates of the chip from which the command will be sent, *not* the coordinates of the chip on which the write will be performed. link : :py:class:`rig.links.Links` The link down which the write should be sent. """ if address % 4: raise ValueError("Addresses must be word-aligned.") if len(data) % 4: raise ValueError("Data must be a whole number of words.") length_bytes = len(data) cur_byte = 0 # Write the requested data, one SCP packet worth at a time while length_bytes > 0: to_write = min(length_bytes, (self.scp_data_length & ~0b11)) cur_data = data[cur_byte:cur_byte + to_write] self._send_scp(x, y, 0, SCPCommands.link_write, arg1=address, arg2=to_write, arg3=int(link), data=cur_data, expected_args=0) # Move to the next block to write address += to_write cur_byte += to_write length_bytes -= to_write
[ "def", "write_across_link", "(", "self", ",", "address", ",", "data", ",", "x", ",", "y", ",", "link", ")", ":", "if", "address", "%", "4", ":", "raise", "ValueError", "(", "\"Addresses must be word-aligned.\"", ")", "if", "len", "(", "data", ")", "%", ...
Write a bytestring to an address in memory on a neigbouring chip. .. warning:: This function is intended for low-level debug use only and is not optimised for performance nor intended for more general use. This method instructs a monitor processor to send 'POKE' nearest-neighbour packets to a neighbouring chip. These packets are handled directly by the SpiNNaker router in the neighbouring chip, potentially allowing advanced debug or recovery of a chip rendered otherwise unreachable. Parameters ---------- address : int The address at which to start writing the data. Only addresses in the system-wide address map may be accessed. Addresses must be word aligned. data : :py:class:`bytes` Data to write into memory. Must be a whole number of words in length. Large writes are automatically broken into a sequence of SCP link-write commands. x : int y : int The coordinates of the chip from which the command will be sent, *not* the coordinates of the chip on which the write will be performed. link : :py:class:`rig.links.Links` The link down which the write should be sent.
[ "Write", "a", "bytestring", "to", "an", "address", "in", "memory", "on", "a", "neigbouring", "chip", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/machine_control/machine_controller.py#L549-L600
train
50,583
project-rig/rig
rig/machine_control/machine_controller.py
MachineController.read_across_link
def read_across_link(self, address, length_bytes, x, y, link): """Read a bytestring from an address in memory on a neigbouring chip. .. warning:: This function is intended for low-level debug use only and is not optimised for performance nor intended for more general use. This method instructs a monitor processor to send 'PEEK' nearest-neighbour packets to a neighbouring chip. These packets are handled directly by the SpiNNaker router in the neighbouring chip, potentially allowing advanced debug or recovery of a chip rendered otherwise unreachable. Parameters ---------- address : int The address at which to start reading the data. Only addresses in the system-wide address map may be accessed. Addresses must be word aligned. length_bytes : int The number of bytes to read from memory. Must be a multiple of four (i.e. a whole number of words). Large reads are transparently broken into multiple SCP link-read commands. x : int y : int The coordinates of the chip from which the command will be sent, *not* the coordinates of the chip on which the read will be performed. link : :py:class:`rig.links.Links` The link down which the read should be sent. Returns ------- :py:class:`bytes` The data is read back from memory as a bytestring. """ if address % 4: raise ValueError("Addresses must be word-aligned.") if length_bytes % 4: raise ValueError("Lengths must be multiples of words.") # Prepare the buffer to receive the incoming data data = bytearray(length_bytes) mem = memoryview(data) # Read the requested data, one SCP packet worth at a time while length_bytes > 0: to_read = min(length_bytes, (self.scp_data_length & ~0b11)) response = self._send_scp(x, y, 0, SCPCommands.link_read, arg1=address, arg2=to_read, arg3=int(link), expected_args=0) # Accumulate the incoming data and advance the memoryview through # the buffer. mem[:to_read] = response.data mem = mem[to_read:] # Move to the next block to read address += to_read length_bytes -= to_read return bytes(data)
python
def read_across_link(self, address, length_bytes, x, y, link): """Read a bytestring from an address in memory on a neigbouring chip. .. warning:: This function is intended for low-level debug use only and is not optimised for performance nor intended for more general use. This method instructs a monitor processor to send 'PEEK' nearest-neighbour packets to a neighbouring chip. These packets are handled directly by the SpiNNaker router in the neighbouring chip, potentially allowing advanced debug or recovery of a chip rendered otherwise unreachable. Parameters ---------- address : int The address at which to start reading the data. Only addresses in the system-wide address map may be accessed. Addresses must be word aligned. length_bytes : int The number of bytes to read from memory. Must be a multiple of four (i.e. a whole number of words). Large reads are transparently broken into multiple SCP link-read commands. x : int y : int The coordinates of the chip from which the command will be sent, *not* the coordinates of the chip on which the read will be performed. link : :py:class:`rig.links.Links` The link down which the read should be sent. Returns ------- :py:class:`bytes` The data is read back from memory as a bytestring. """ if address % 4: raise ValueError("Addresses must be word-aligned.") if length_bytes % 4: raise ValueError("Lengths must be multiples of words.") # Prepare the buffer to receive the incoming data data = bytearray(length_bytes) mem = memoryview(data) # Read the requested data, one SCP packet worth at a time while length_bytes > 0: to_read = min(length_bytes, (self.scp_data_length & ~0b11)) response = self._send_scp(x, y, 0, SCPCommands.link_read, arg1=address, arg2=to_read, arg3=int(link), expected_args=0) # Accumulate the incoming data and advance the memoryview through # the buffer. mem[:to_read] = response.data mem = mem[to_read:] # Move to the next block to read address += to_read length_bytes -= to_read return bytes(data)
[ "def", "read_across_link", "(", "self", ",", "address", ",", "length_bytes", ",", "x", ",", "y", ",", "link", ")", ":", "if", "address", "%", "4", ":", "raise", "ValueError", "(", "\"Addresses must be word-aligned.\"", ")", "if", "length_bytes", "%", "4", ...
Read a bytestring from an address in memory on a neigbouring chip. .. warning:: This function is intended for low-level debug use only and is not optimised for performance nor intended for more general use. This method instructs a monitor processor to send 'PEEK' nearest-neighbour packets to a neighbouring chip. These packets are handled directly by the SpiNNaker router in the neighbouring chip, potentially allowing advanced debug or recovery of a chip rendered otherwise unreachable. Parameters ---------- address : int The address at which to start reading the data. Only addresses in the system-wide address map may be accessed. Addresses must be word aligned. length_bytes : int The number of bytes to read from memory. Must be a multiple of four (i.e. a whole number of words). Large reads are transparently broken into multiple SCP link-read commands. x : int y : int The coordinates of the chip from which the command will be sent, *not* the coordinates of the chip on which the read will be performed. link : :py:class:`rig.links.Links` The link down which the read should be sent. Returns ------- :py:class:`bytes` The data is read back from memory as a bytestring.
[ "Read", "a", "bytestring", "from", "an", "address", "in", "memory", "on", "a", "neigbouring", "chip", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/machine_control/machine_controller.py#L603-L667
train
50,584
project-rig/rig
rig/machine_control/machine_controller.py
MachineController.read_struct_field
def read_struct_field(self, struct_name, field_name, x, y, p=0): """Read the value out of a struct maintained by SARK. This method is particularly useful for reading fields from the ``sv`` struct which, for example, holds information about system status. See ``sark.h`` for details. Parameters ---------- struct_name : string Name of the struct to read from, e.g., `"sv"` field_name : string Name of the field to read, e.g., `"eth_addr"` Returns ------- value The value returned is unpacked given the struct specification. Currently arrays are returned as tuples, e.g.:: # Returns a 20-tuple. cn.read_struct_field("sv", "status_map") # Fails cn.read_struct_field("sv", "status_map[1]") """ # Look up the struct and field field, address, pack_chars = \ self._get_struct_field_and_address(struct_name, field_name) length = struct.calcsize(pack_chars) # Perform the read data = self.read(address, length, x, y, p) # Unpack the data unpacked = struct.unpack(pack_chars, data) if field.length == 1: return unpacked[0] else: return unpacked
python
def read_struct_field(self, struct_name, field_name, x, y, p=0): """Read the value out of a struct maintained by SARK. This method is particularly useful for reading fields from the ``sv`` struct which, for example, holds information about system status. See ``sark.h`` for details. Parameters ---------- struct_name : string Name of the struct to read from, e.g., `"sv"` field_name : string Name of the field to read, e.g., `"eth_addr"` Returns ------- value The value returned is unpacked given the struct specification. Currently arrays are returned as tuples, e.g.:: # Returns a 20-tuple. cn.read_struct_field("sv", "status_map") # Fails cn.read_struct_field("sv", "status_map[1]") """ # Look up the struct and field field, address, pack_chars = \ self._get_struct_field_and_address(struct_name, field_name) length = struct.calcsize(pack_chars) # Perform the read data = self.read(address, length, x, y, p) # Unpack the data unpacked = struct.unpack(pack_chars, data) if field.length == 1: return unpacked[0] else: return unpacked
[ "def", "read_struct_field", "(", "self", ",", "struct_name", ",", "field_name", ",", "x", ",", "y", ",", "p", "=", "0", ")", ":", "# Look up the struct and field", "field", ",", "address", ",", "pack_chars", "=", "self", ".", "_get_struct_field_and_address", "...
Read the value out of a struct maintained by SARK. This method is particularly useful for reading fields from the ``sv`` struct which, for example, holds information about system status. See ``sark.h`` for details. Parameters ---------- struct_name : string Name of the struct to read from, e.g., `"sv"` field_name : string Name of the field to read, e.g., `"eth_addr"` Returns ------- value The value returned is unpacked given the struct specification. Currently arrays are returned as tuples, e.g.:: # Returns a 20-tuple. cn.read_struct_field("sv", "status_map") # Fails cn.read_struct_field("sv", "status_map[1]")
[ "Read", "the", "value", "out", "of", "a", "struct", "maintained", "by", "SARK", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/machine_control/machine_controller.py#L677-L718
train
50,585
project-rig/rig
rig/machine_control/machine_controller.py
MachineController.write_struct_field
def write_struct_field(self, struct_name, field_name, values, x, y, p=0): """Write a value into a struct. This method is particularly useful for writing values into the ``sv`` struct which contains some configuration data. See ``sark.h`` for details. Parameters ---------- struct_name : string Name of the struct to write to, e.g., `"sv"` field_name : string Name of the field to write, e.g., `"random"` values : Value(s) to be written into the field. .. warning:: Fields which are arrays must currently be written in their entirety. """ # Look up the struct and field field, address, pack_chars = \ self._get_struct_field_and_address(struct_name, field_name) if field.length != 1: assert len(values) == field.length data = struct.pack(pack_chars, *values) else: data = struct.pack(pack_chars, values) # Perform the write self.write(address, data, x, y, p)
python
def write_struct_field(self, struct_name, field_name, values, x, y, p=0): """Write a value into a struct. This method is particularly useful for writing values into the ``sv`` struct which contains some configuration data. See ``sark.h`` for details. Parameters ---------- struct_name : string Name of the struct to write to, e.g., `"sv"` field_name : string Name of the field to write, e.g., `"random"` values : Value(s) to be written into the field. .. warning:: Fields which are arrays must currently be written in their entirety. """ # Look up the struct and field field, address, pack_chars = \ self._get_struct_field_and_address(struct_name, field_name) if field.length != 1: assert len(values) == field.length data = struct.pack(pack_chars, *values) else: data = struct.pack(pack_chars, values) # Perform the write self.write(address, data, x, y, p)
[ "def", "write_struct_field", "(", "self", ",", "struct_name", ",", "field_name", ",", "values", ",", "x", ",", "y", ",", "p", "=", "0", ")", ":", "# Look up the struct and field", "field", ",", "address", ",", "pack_chars", "=", "self", ".", "_get_struct_fie...
Write a value into a struct. This method is particularly useful for writing values into the ``sv`` struct which contains some configuration data. See ``sark.h`` for details. Parameters ---------- struct_name : string Name of the struct to write to, e.g., `"sv"` field_name : string Name of the field to write, e.g., `"random"` values : Value(s) to be written into the field. .. warning:: Fields which are arrays must currently be written in their entirety.
[ "Write", "a", "value", "into", "a", "struct", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/machine_control/machine_controller.py#L721-L752
train
50,586
project-rig/rig
rig/machine_control/machine_controller.py
MachineController._get_vcpu_field_and_address
def _get_vcpu_field_and_address(self, field_name, x, y, p): """Get the field and address for a VCPU struct field.""" vcpu_struct = self.structs[b"vcpu"] field = vcpu_struct[six.b(field_name)] address = (self.read_struct_field("sv", "vcpu_base", x, y) + vcpu_struct.size * p) + field.offset pack_chars = b"<" + field.pack_chars return field, address, pack_chars
python
def _get_vcpu_field_and_address(self, field_name, x, y, p): """Get the field and address for a VCPU struct field.""" vcpu_struct = self.structs[b"vcpu"] field = vcpu_struct[six.b(field_name)] address = (self.read_struct_field("sv", "vcpu_base", x, y) + vcpu_struct.size * p) + field.offset pack_chars = b"<" + field.pack_chars return field, address, pack_chars
[ "def", "_get_vcpu_field_and_address", "(", "self", ",", "field_name", ",", "x", ",", "y", ",", "p", ")", ":", "vcpu_struct", "=", "self", ".", "structs", "[", "b\"vcpu\"", "]", "field", "=", "vcpu_struct", "[", "six", ".", "b", "(", "field_name", ")", ...
Get the field and address for a VCPU struct field.
[ "Get", "the", "field", "and", "address", "for", "a", "VCPU", "struct", "field", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/machine_control/machine_controller.py#L754-L761
train
50,587
project-rig/rig
rig/machine_control/machine_controller.py
MachineController.read_vcpu_struct_field
def read_vcpu_struct_field(self, field_name, x, y, p): """Read a value out of the VCPU struct for a specific core. Similar to :py:meth:`.read_struct_field` except this method accesses the individual VCPU struct for to each core and contains application runtime status. Parameters ---------- field_name : string Name of the field to read from the struct (e.g. `"cpu_state"`) Returns ------- value A value of the type contained in the specified struct field. """ # Get the base address of the VCPU struct for this chip, then advance # to get the correct VCPU struct for the requested core. field, address, pack_chars = \ self._get_vcpu_field_and_address(field_name, x, y, p) # Perform the read length = struct.calcsize(pack_chars) data = self.read(address, length, x, y) # Unpack and return unpacked = struct.unpack(pack_chars, data) if field.length == 1: return unpacked[0] else: # If the field is a string then truncate it and return if b"s" in pack_chars: return unpacked[0].strip(b"\x00").decode("utf-8") # Otherwise just return. (Note: at the time of writing, no fields # in the VCPU struct are of this form.) return unpacked
python
def read_vcpu_struct_field(self, field_name, x, y, p): """Read a value out of the VCPU struct for a specific core. Similar to :py:meth:`.read_struct_field` except this method accesses the individual VCPU struct for to each core and contains application runtime status. Parameters ---------- field_name : string Name of the field to read from the struct (e.g. `"cpu_state"`) Returns ------- value A value of the type contained in the specified struct field. """ # Get the base address of the VCPU struct for this chip, then advance # to get the correct VCPU struct for the requested core. field, address, pack_chars = \ self._get_vcpu_field_and_address(field_name, x, y, p) # Perform the read length = struct.calcsize(pack_chars) data = self.read(address, length, x, y) # Unpack and return unpacked = struct.unpack(pack_chars, data) if field.length == 1: return unpacked[0] else: # If the field is a string then truncate it and return if b"s" in pack_chars: return unpacked[0].strip(b"\x00").decode("utf-8") # Otherwise just return. (Note: at the time of writing, no fields # in the VCPU struct are of this form.) return unpacked
[ "def", "read_vcpu_struct_field", "(", "self", ",", "field_name", ",", "x", ",", "y", ",", "p", ")", ":", "# Get the base address of the VCPU struct for this chip, then advance", "# to get the correct VCPU struct for the requested core.", "field", ",", "address", ",", "pack_ch...
Read a value out of the VCPU struct for a specific core. Similar to :py:meth:`.read_struct_field` except this method accesses the individual VCPU struct for to each core and contains application runtime status. Parameters ---------- field_name : string Name of the field to read from the struct (e.g. `"cpu_state"`) Returns ------- value A value of the type contained in the specified struct field.
[ "Read", "a", "value", "out", "of", "the", "VCPU", "struct", "for", "a", "specific", "core", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/machine_control/machine_controller.py#L764-L802
train
50,588
project-rig/rig
rig/machine_control/machine_controller.py
MachineController.write_vcpu_struct_field
def write_vcpu_struct_field(self, field_name, value, x, y, p): """Write a value to the VCPU struct for a specific core. Parameters ---------- field_name : string Name of the field to write (e.g. `"user0"`) value : Value to write to this field. """ field, address, pack_chars = \ self._get_vcpu_field_and_address(field_name, x, y, p) # Pack the data if b"s" in pack_chars: data = struct.pack(pack_chars, value.encode('utf-8')) elif field.length == 1: data = struct.pack(pack_chars, value) else: # NOTE: At the time of writing no VCPU struct fields are of this # form. data = struct.pack(pack_chars, *value) # pragma: no cover # Perform the write self.write(address, data, x, y)
python
def write_vcpu_struct_field(self, field_name, value, x, y, p): """Write a value to the VCPU struct for a specific core. Parameters ---------- field_name : string Name of the field to write (e.g. `"user0"`) value : Value to write to this field. """ field, address, pack_chars = \ self._get_vcpu_field_and_address(field_name, x, y, p) # Pack the data if b"s" in pack_chars: data = struct.pack(pack_chars, value.encode('utf-8')) elif field.length == 1: data = struct.pack(pack_chars, value) else: # NOTE: At the time of writing no VCPU struct fields are of this # form. data = struct.pack(pack_chars, *value) # pragma: no cover # Perform the write self.write(address, data, x, y)
[ "def", "write_vcpu_struct_field", "(", "self", ",", "field_name", ",", "value", ",", "x", ",", "y", ",", "p", ")", ":", "field", ",", "address", ",", "pack_chars", "=", "self", ".", "_get_vcpu_field_and_address", "(", "field_name", ",", "x", ",", "y", ",...
Write a value to the VCPU struct for a specific core. Parameters ---------- field_name : string Name of the field to write (e.g. `"user0"`) value : Value to write to this field.
[ "Write", "a", "value", "to", "the", "VCPU", "struct", "for", "a", "specific", "core", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/machine_control/machine_controller.py#L805-L829
train
50,589
project-rig/rig
rig/machine_control/machine_controller.py
MachineController.get_processor_status
def get_processor_status(self, p, x, y): """Get the status of a given core and the application executing on it. Returns ------- :py:class:`.ProcessorStatus` Representation of the current state of the processor. """ # Get the VCPU base address = (self.read_struct_field("sv", "vcpu_base", x, y) + self.structs[b"vcpu"].size * p) # Get the VCPU data data = self.read(address, self.structs[b"vcpu"].size, x, y) # Build the kwargs that describe the current state state = { name.decode('utf-8'): struct.unpack( f.pack_chars, data[f.offset:f.offset+struct.calcsize(f.pack_chars)] )[0] for (name, f) in iteritems(self.structs[b"vcpu"].fields) } state["registers"] = [state.pop("r{}".format(i)) for i in range(8)] state["user_vars"] = [state.pop("user{}".format(i)) for i in range(4)] state["app_name"] = state["app_name"].strip(b'\x00').decode('utf-8') state["cpu_state"] = consts.AppState(state["cpu_state"]) state["rt_code"] = consts.RuntimeException(state["rt_code"]) sw_ver = state.pop("sw_ver") state["version"] = ((sw_ver >> 16) & 0xFF, (sw_ver >> 8) & 0xFF, (sw_ver >> 0) & 0xFF) for newname, oldname in [("iobuf_address", "iobuf"), ("program_state_register", "psr"), ("stack_pointer", "sp"), ("link_register", "lr"), ]: state[newname] = state.pop(oldname) state.pop("__PAD") return ProcessorStatus(**state)
python
def get_processor_status(self, p, x, y): """Get the status of a given core and the application executing on it. Returns ------- :py:class:`.ProcessorStatus` Representation of the current state of the processor. """ # Get the VCPU base address = (self.read_struct_field("sv", "vcpu_base", x, y) + self.structs[b"vcpu"].size * p) # Get the VCPU data data = self.read(address, self.structs[b"vcpu"].size, x, y) # Build the kwargs that describe the current state state = { name.decode('utf-8'): struct.unpack( f.pack_chars, data[f.offset:f.offset+struct.calcsize(f.pack_chars)] )[0] for (name, f) in iteritems(self.structs[b"vcpu"].fields) } state["registers"] = [state.pop("r{}".format(i)) for i in range(8)] state["user_vars"] = [state.pop("user{}".format(i)) for i in range(4)] state["app_name"] = state["app_name"].strip(b'\x00').decode('utf-8') state["cpu_state"] = consts.AppState(state["cpu_state"]) state["rt_code"] = consts.RuntimeException(state["rt_code"]) sw_ver = state.pop("sw_ver") state["version"] = ((sw_ver >> 16) & 0xFF, (sw_ver >> 8) & 0xFF, (sw_ver >> 0) & 0xFF) for newname, oldname in [("iobuf_address", "iobuf"), ("program_state_register", "psr"), ("stack_pointer", "sp"), ("link_register", "lr"), ]: state[newname] = state.pop(oldname) state.pop("__PAD") return ProcessorStatus(**state)
[ "def", "get_processor_status", "(", "self", ",", "p", ",", "x", ",", "y", ")", ":", "# Get the VCPU base", "address", "=", "(", "self", ".", "read_struct_field", "(", "\"sv\"", ",", "\"vcpu_base\"", ",", "x", ",", "y", ")", "+", "self", ".", "structs", ...
Get the status of a given core and the application executing on it. Returns ------- :py:class:`.ProcessorStatus` Representation of the current state of the processor.
[ "Get", "the", "status", "of", "a", "given", "core", "and", "the", "application", "executing", "on", "it", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/machine_control/machine_controller.py#L832-L871
train
50,590
project-rig/rig
rig/machine_control/machine_controller.py
MachineController.get_iobuf
def get_iobuf(self, p, x, y): """Read the messages ``io_printf``'d into the ``IOBUF`` buffer on a specified core. See also: :py:meth:`.get_iobuf_bytes` which returns the undecoded raw bytes in the ``IOBUF``. Useful if the IOBUF contains non-text or non-UTF-8 encoded text. Returns ------- str The string in the ``IOBUF``, decoded from UTF-8. """ return self.get_iobuf_bytes(p, x, y).decode("utf-8")
python
def get_iobuf(self, p, x, y): """Read the messages ``io_printf``'d into the ``IOBUF`` buffer on a specified core. See also: :py:meth:`.get_iobuf_bytes` which returns the undecoded raw bytes in the ``IOBUF``. Useful if the IOBUF contains non-text or non-UTF-8 encoded text. Returns ------- str The string in the ``IOBUF``, decoded from UTF-8. """ return self.get_iobuf_bytes(p, x, y).decode("utf-8")
[ "def", "get_iobuf", "(", "self", ",", "p", ",", "x", ",", "y", ")", ":", "return", "self", ".", "get_iobuf_bytes", "(", "p", ",", "x", ",", "y", ")", ".", "decode", "(", "\"utf-8\"", ")" ]
Read the messages ``io_printf``'d into the ``IOBUF`` buffer on a specified core. See also: :py:meth:`.get_iobuf_bytes` which returns the undecoded raw bytes in the ``IOBUF``. Useful if the IOBUF contains non-text or non-UTF-8 encoded text. Returns ------- str The string in the ``IOBUF``, decoded from UTF-8.
[ "Read", "the", "messages", "io_printf", "d", "into", "the", "IOBUF", "buffer", "on", "a", "specified", "core", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/machine_control/machine_controller.py#L874-L887
train
50,591
project-rig/rig
rig/machine_control/machine_controller.py
MachineController.get_iobuf_bytes
def get_iobuf_bytes(self, p, x, y): """Read raw bytes ``io_printf``'d into the ``IOBUF`` buffer on a specified core. This may be useful when the data contained in the ``IOBUF`` is not UTF-8 encoded text. See also: :py:meth:`.get_iobuf` which returns a decoded string rather than raw bytes. Returns ------- bytes The raw, undecoded string data in the buffer. """ # The IOBUF data is stored in a linked-list of blocks of memory in # SDRAM. The size of each block is given in SV iobuf_size = self.read_struct_field("sv", "iobuf_size", x, y) # The first block in the list is given in the core's VCPU field address = self.read_vcpu_struct_field("iobuf", x, y, p) iobuf = b"" while address: # The IOBUF data is proceeded by a header which gives the next # address and also the length of the string in the current buffer. iobuf_data = self.read(address, iobuf_size + 16, x, y) address, time, ms, length = struct.unpack("<4I", iobuf_data[:16]) iobuf += iobuf_data[16:16 + length] return iobuf
python
def get_iobuf_bytes(self, p, x, y): """Read raw bytes ``io_printf``'d into the ``IOBUF`` buffer on a specified core. This may be useful when the data contained in the ``IOBUF`` is not UTF-8 encoded text. See also: :py:meth:`.get_iobuf` which returns a decoded string rather than raw bytes. Returns ------- bytes The raw, undecoded string data in the buffer. """ # The IOBUF data is stored in a linked-list of blocks of memory in # SDRAM. The size of each block is given in SV iobuf_size = self.read_struct_field("sv", "iobuf_size", x, y) # The first block in the list is given in the core's VCPU field address = self.read_vcpu_struct_field("iobuf", x, y, p) iobuf = b"" while address: # The IOBUF data is proceeded by a header which gives the next # address and also the length of the string in the current buffer. iobuf_data = self.read(address, iobuf_size + 16, x, y) address, time, ms, length = struct.unpack("<4I", iobuf_data[:16]) iobuf += iobuf_data[16:16 + length] return iobuf
[ "def", "get_iobuf_bytes", "(", "self", ",", "p", ",", "x", ",", "y", ")", ":", "# The IOBUF data is stored in a linked-list of blocks of memory in", "# SDRAM. The size of each block is given in SV", "iobuf_size", "=", "self", ".", "read_struct_field", "(", "\"sv\"", ",", ...
Read raw bytes ``io_printf``'d into the ``IOBUF`` buffer on a specified core. This may be useful when the data contained in the ``IOBUF`` is not UTF-8 encoded text. See also: :py:meth:`.get_iobuf` which returns a decoded string rather than raw bytes. Returns ------- bytes The raw, undecoded string data in the buffer.
[ "Read", "raw", "bytes", "io_printf", "d", "into", "the", "IOBUF", "buffer", "on", "a", "specified", "core", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/machine_control/machine_controller.py#L890-L921
train
50,592
project-rig/rig
rig/machine_control/machine_controller.py
MachineController.get_router_diagnostics
def get_router_diagnostics(self, x, y): """Get the values of the router diagnostic counters. Returns ------- :py:class:`~.RouterDiagnostics` Description of the state of the counters. """ # Read the block of memory data = self.read(0xe1000300, 64, x=x, y=y) # Convert to 16 ints, then process that as the appropriate tuple type return RouterDiagnostics(*struct.unpack("<16I", data))
python
def get_router_diagnostics(self, x, y): """Get the values of the router diagnostic counters. Returns ------- :py:class:`~.RouterDiagnostics` Description of the state of the counters. """ # Read the block of memory data = self.read(0xe1000300, 64, x=x, y=y) # Convert to 16 ints, then process that as the appropriate tuple type return RouterDiagnostics(*struct.unpack("<16I", data))
[ "def", "get_router_diagnostics", "(", "self", ",", "x", ",", "y", ")", ":", "# Read the block of memory", "data", "=", "self", ".", "read", "(", "0xe1000300", ",", "64", ",", "x", "=", "x", ",", "y", "=", "y", ")", "# Convert to 16 ints, then process that as...
Get the values of the router diagnostic counters. Returns ------- :py:class:`~.RouterDiagnostics` Description of the state of the counters.
[ "Get", "the", "values", "of", "the", "router", "diagnostic", "counters", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/machine_control/machine_controller.py#L924-L936
train
50,593
project-rig/rig
rig/machine_control/machine_controller.py
MachineController.iptag_set
def iptag_set(self, iptag, addr, port, x, y): """Set the value of an IPTag. Forward SDP packets with the specified IP tag sent by a SpiNNaker application to a given external IP address. A :ref:`tutorial example <scp-and-sdp-tutorial>` of the use of IP Tags to send and receive SDP packets to and from applications is also available. Parameters ---------- iptag : int Index of the IPTag to set addr : string IP address or hostname that the IPTag should point at. port : int UDP port that the IPTag should direct packets to. """ # Format the IP address ip_addr = struct.pack('!4B', *map(int, socket.gethostbyname(addr).split('.'))) self._send_scp(x, y, 0, SCPCommands.iptag, int(consts.IPTagCommands.set) << 16 | iptag, port, struct.unpack('<I', ip_addr)[0])
python
def iptag_set(self, iptag, addr, port, x, y): """Set the value of an IPTag. Forward SDP packets with the specified IP tag sent by a SpiNNaker application to a given external IP address. A :ref:`tutorial example <scp-and-sdp-tutorial>` of the use of IP Tags to send and receive SDP packets to and from applications is also available. Parameters ---------- iptag : int Index of the IPTag to set addr : string IP address or hostname that the IPTag should point at. port : int UDP port that the IPTag should direct packets to. """ # Format the IP address ip_addr = struct.pack('!4B', *map(int, socket.gethostbyname(addr).split('.'))) self._send_scp(x, y, 0, SCPCommands.iptag, int(consts.IPTagCommands.set) << 16 | iptag, port, struct.unpack('<I', ip_addr)[0])
[ "def", "iptag_set", "(", "self", ",", "iptag", ",", "addr", ",", "port", ",", "x", ",", "y", ")", ":", "# Format the IP address", "ip_addr", "=", "struct", ".", "pack", "(", "'!4B'", ",", "*", "map", "(", "int", ",", "socket", ".", "gethostbyname", "...
Set the value of an IPTag. Forward SDP packets with the specified IP tag sent by a SpiNNaker application to a given external IP address. A :ref:`tutorial example <scp-and-sdp-tutorial>` of the use of IP Tags to send and receive SDP packets to and from applications is also available. Parameters ---------- iptag : int Index of the IPTag to set addr : string IP address or hostname that the IPTag should point at. port : int UDP port that the IPTag should direct packets to.
[ "Set", "the", "value", "of", "an", "IPTag", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/machine_control/machine_controller.py#L939-L963
train
50,594
project-rig/rig
rig/machine_control/machine_controller.py
MachineController.iptag_get
def iptag_get(self, iptag, x, y): """Get the value of an IPTag. Parameters ---------- iptag : int Index of the IPTag to get Returns ------- :py:class:`.IPTag` The IPTag returned from SpiNNaker. """ ack = self._send_scp(x, y, 0, SCPCommands.iptag, int(consts.IPTagCommands.get) << 16 | iptag, 1, expected_args=0) return IPTag.from_bytestring(ack.data)
python
def iptag_get(self, iptag, x, y): """Get the value of an IPTag. Parameters ---------- iptag : int Index of the IPTag to get Returns ------- :py:class:`.IPTag` The IPTag returned from SpiNNaker. """ ack = self._send_scp(x, y, 0, SCPCommands.iptag, int(consts.IPTagCommands.get) << 16 | iptag, 1, expected_args=0) return IPTag.from_bytestring(ack.data)
[ "def", "iptag_get", "(", "self", ",", "iptag", ",", "x", ",", "y", ")", ":", "ack", "=", "self", ".", "_send_scp", "(", "x", ",", "y", ",", "0", ",", "SCPCommands", ".", "iptag", ",", "int", "(", "consts", ".", "IPTagCommands", ".", "get", ")", ...
Get the value of an IPTag. Parameters ---------- iptag : int Index of the IPTag to get Returns ------- :py:class:`.IPTag` The IPTag returned from SpiNNaker.
[ "Get", "the", "value", "of", "an", "IPTag", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/machine_control/machine_controller.py#L966-L982
train
50,595
project-rig/rig
rig/machine_control/machine_controller.py
MachineController.iptag_clear
def iptag_clear(self, iptag, x, y): """Clear an IPTag. Parameters ---------- iptag : int Index of the IPTag to clear. """ self._send_scp(x, y, 0, SCPCommands.iptag, int(consts.IPTagCommands.clear) << 16 | iptag)
python
def iptag_clear(self, iptag, x, y): """Clear an IPTag. Parameters ---------- iptag : int Index of the IPTag to clear. """ self._send_scp(x, y, 0, SCPCommands.iptag, int(consts.IPTagCommands.clear) << 16 | iptag)
[ "def", "iptag_clear", "(", "self", ",", "iptag", ",", "x", ",", "y", ")", ":", "self", ".", "_send_scp", "(", "x", ",", "y", ",", "0", ",", "SCPCommands", ".", "iptag", ",", "int", "(", "consts", ".", "IPTagCommands", ".", "clear", ")", "<<", "16...
Clear an IPTag. Parameters ---------- iptag : int Index of the IPTag to clear.
[ "Clear", "an", "IPTag", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/machine_control/machine_controller.py#L985-L994
train
50,596
project-rig/rig
rig/machine_control/machine_controller.py
MachineController.fill
def fill(self, address, data, size, x, y, p): """Fill a region of memory with the specified byte. Parameters ---------- data : int Data with which to fill memory. If `address` and `size` are word aligned then `data` is assumed to be a word; otherwise it is assumed to be a byte. Notes ----- If the address and size are word aligned then a fast fill method will be used, otherwise a much slower write will be incurred. """ if size % 4 or address % 4: # If neither the size nor the address are word aligned we have to # use `write` as `sark_word_set` can only work with words. # Convert the data into a string and then write: data = struct.pack('<B', data) * size self.write(address, data, x, y, p) else: # We can perform a fill, this will call `sark_word_set` internally. self._send_scp(x, y, p, SCPCommands.fill, address, data, size)
python
def fill(self, address, data, size, x, y, p): """Fill a region of memory with the specified byte. Parameters ---------- data : int Data with which to fill memory. If `address` and `size` are word aligned then `data` is assumed to be a word; otherwise it is assumed to be a byte. Notes ----- If the address and size are word aligned then a fast fill method will be used, otherwise a much slower write will be incurred. """ if size % 4 or address % 4: # If neither the size nor the address are word aligned we have to # use `write` as `sark_word_set` can only work with words. # Convert the data into a string and then write: data = struct.pack('<B', data) * size self.write(address, data, x, y, p) else: # We can perform a fill, this will call `sark_word_set` internally. self._send_scp(x, y, p, SCPCommands.fill, address, data, size)
[ "def", "fill", "(", "self", ",", "address", ",", "data", ",", "size", ",", "x", ",", "y", ",", "p", ")", ":", "if", "size", "%", "4", "or", "address", "%", "4", ":", "# If neither the size nor the address are word aligned we have to", "# use `write` as `sark_w...
Fill a region of memory with the specified byte. Parameters ---------- data : int Data with which to fill memory. If `address` and `size` are word aligned then `data` is assumed to be a word; otherwise it is assumed to be a byte. Notes ----- If the address and size are word aligned then a fast fill method will be used, otherwise a much slower write will be incurred.
[ "Fill", "a", "region", "of", "memory", "with", "the", "specified", "byte", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/machine_control/machine_controller.py#L1020-L1043
train
50,597
project-rig/rig
rig/machine_control/machine_controller.py
MachineController.sdram_alloc
def sdram_alloc(self, size, tag=0, x=Required, y=Required, app_id=Required, clear=False): """Allocate a region of SDRAM for an application. Requests SARK to allocate a block of SDRAM for an application and raises a :py:exc:`.SpiNNakerMemoryError` on failure. This allocation will be freed when the application is stopped. Parameters ---------- size : int Number of bytes to attempt to allocate in SDRAM. tag : int 8-bit tag that can be looked up by a SpiNNaker application to discover the address of the allocated block. The tag must be unique for this ``app_id`` on this chip. Attempting to allocate two blocks on the same chip and for the same ``app_id`` will fail. If ``0`` (the default) then no tag is applied. For example, if some SDRAM is allocated with ``tag=12``, a SpiNNaker application can later discover the address using:: void *allocated_data = sark_tag_ptr(12, 0); A common convention is to allocate one block of SDRAM per application core and give each allocation the associated core number as its tag. This way the underlying SpiNNaker applications can simply call:: void *allocated_data = sark_tag_ptr(sark_core_id(), 0); clear : bool If True the requested memory will be filled with zeros before the pointer is returned. If False (the default) the memory will be left as-is. Returns ------- int Address of the start of the region. The allocated SDRAM remains valid until either the 'stop' signal is sent to the application ID associated with the allocation or :py:meth:`.sdram_free` is called on the address returned. Raises ------ rig.machine_control.machine_controller.SpiNNakerMemoryError If the memory cannot be allocated, the tag is already taken or it is invalid. 
""" assert 0 <= tag < 256 # Construct arg1 (app_id << 8) | op code arg1 = app_id << 8 | consts.AllocOperations.alloc_sdram # Send the packet and retrieve the address rv = self._send_scp(x, y, 0, SCPCommands.alloc_free, arg1, size, tag) if rv.arg1 == 0: # Allocation failed tag_in_use = False if tag != 0: # If a tag was specified then read the allocation table to see # if the tag was already in use or whether we ran out of # memory. alloc_tags = self.read_struct_field("sv", "alloc_tag", x, y) index = (app_id << 8) + tag entry = self.read(alloc_tags + index, 4, x, y) tag_in_use = (entry != 0) raise SpiNNakerMemoryError(size, x, y, tag, tag_in_use) # Get the address address = rv.arg1 if clear: # Clear the memory if so desired self.fill(address, 0, size, x, y, 0) return address
python
def sdram_alloc(self, size, tag=0, x=Required, y=Required, app_id=Required, clear=False): """Allocate a region of SDRAM for an application. Requests SARK to allocate a block of SDRAM for an application and raises a :py:exc:`.SpiNNakerMemoryError` on failure. This allocation will be freed when the application is stopped. Parameters ---------- size : int Number of bytes to attempt to allocate in SDRAM. tag : int 8-bit tag that can be looked up by a SpiNNaker application to discover the address of the allocated block. The tag must be unique for this ``app_id`` on this chip. Attempting to allocate two blocks on the same chip and for the same ``app_id`` will fail. If ``0`` (the default) then no tag is applied. For example, if some SDRAM is allocated with ``tag=12``, a SpiNNaker application can later discover the address using:: void *allocated_data = sark_tag_ptr(12, 0); A common convention is to allocate one block of SDRAM per application core and give each allocation the associated core number as its tag. This way the underlying SpiNNaker applications can simply call:: void *allocated_data = sark_tag_ptr(sark_core_id(), 0); clear : bool If True the requested memory will be filled with zeros before the pointer is returned. If False (the default) the memory will be left as-is. Returns ------- int Address of the start of the region. The allocated SDRAM remains valid until either the 'stop' signal is sent to the application ID associated with the allocation or :py:meth:`.sdram_free` is called on the address returned. Raises ------ rig.machine_control.machine_controller.SpiNNakerMemoryError If the memory cannot be allocated, the tag is already taken or it is invalid. 
""" assert 0 <= tag < 256 # Construct arg1 (app_id << 8) | op code arg1 = app_id << 8 | consts.AllocOperations.alloc_sdram # Send the packet and retrieve the address rv = self._send_scp(x, y, 0, SCPCommands.alloc_free, arg1, size, tag) if rv.arg1 == 0: # Allocation failed tag_in_use = False if tag != 0: # If a tag was specified then read the allocation table to see # if the tag was already in use or whether we ran out of # memory. alloc_tags = self.read_struct_field("sv", "alloc_tag", x, y) index = (app_id << 8) + tag entry = self.read(alloc_tags + index, 4, x, y) tag_in_use = (entry != 0) raise SpiNNakerMemoryError(size, x, y, tag, tag_in_use) # Get the address address = rv.arg1 if clear: # Clear the memory if so desired self.fill(address, 0, size, x, y, 0) return address
[ "def", "sdram_alloc", "(", "self", ",", "size", ",", "tag", "=", "0", ",", "x", "=", "Required", ",", "y", "=", "Required", ",", "app_id", "=", "Required", ",", "clear", "=", "False", ")", ":", "assert", "0", "<=", "tag", "<", "256", "# Construct a...
Allocate a region of SDRAM for an application. Requests SARK to allocate a block of SDRAM for an application and raises a :py:exc:`.SpiNNakerMemoryError` on failure. This allocation will be freed when the application is stopped. Parameters ---------- size : int Number of bytes to attempt to allocate in SDRAM. tag : int 8-bit tag that can be looked up by a SpiNNaker application to discover the address of the allocated block. The tag must be unique for this ``app_id`` on this chip. Attempting to allocate two blocks on the same chip and for the same ``app_id`` will fail. If ``0`` (the default) then no tag is applied. For example, if some SDRAM is allocated with ``tag=12``, a SpiNNaker application can later discover the address using:: void *allocated_data = sark_tag_ptr(12, 0); A common convention is to allocate one block of SDRAM per application core and give each allocation the associated core number as its tag. This way the underlying SpiNNaker applications can simply call:: void *allocated_data = sark_tag_ptr(sark_core_id(), 0); clear : bool If True the requested memory will be filled with zeros before the pointer is returned. If False (the default) the memory will be left as-is. Returns ------- int Address of the start of the region. The allocated SDRAM remains valid until either the 'stop' signal is sent to the application ID associated with the allocation or :py:meth:`.sdram_free` is called on the address returned. Raises ------ rig.machine_control.machine_controller.SpiNNakerMemoryError If the memory cannot be allocated, the tag is already taken or it is invalid.
[ "Allocate", "a", "region", "of", "SDRAM", "for", "an", "application", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/machine_control/machine_controller.py#L1046-L1124
train
50,598
project-rig/rig
rig/machine_control/machine_controller.py
MachineController.sdram_free
def sdram_free(self, ptr, x=Required, y=Required): """Free an allocated block of memory in SDRAM. .. note:: All unfreed SDRAM allocations associated with an application are automatically freed when the 'stop' signal is sent (e.g. after leaving a :py:meth:`.application` block). As such, this method is only useful when specific blocks are to be freed while retaining others. Parameters ---------- ptr : int Address of the block of memory to free. """ self._send_scp(x, y, 0, SCPCommands.alloc_free, consts.AllocOperations.free_sdram_by_ptr, ptr)
python
def sdram_free(self, ptr, x=Required, y=Required): """Free an allocated block of memory in SDRAM. .. note:: All unfreed SDRAM allocations associated with an application are automatically freed when the 'stop' signal is sent (e.g. after leaving a :py:meth:`.application` block). As such, this method is only useful when specific blocks are to be freed while retaining others. Parameters ---------- ptr : int Address of the block of memory to free. """ self._send_scp(x, y, 0, SCPCommands.alloc_free, consts.AllocOperations.free_sdram_by_ptr, ptr)
[ "def", "sdram_free", "(", "self", ",", "ptr", ",", "x", "=", "Required", ",", "y", "=", "Required", ")", ":", "self", ".", "_send_scp", "(", "x", ",", "y", ",", "0", ",", "SCPCommands", ".", "alloc_free", ",", "consts", ".", "AllocOperations", ".", ...
Free an allocated block of memory in SDRAM. .. note:: All unfreed SDRAM allocations associated with an application are automatically freed when the 'stop' signal is sent (e.g. after leaving a :py:meth:`.application` block). As such, this method is only useful when specific blocks are to be freed while retaining others. Parameters ---------- ptr : int Address of the block of memory to free.
[ "Free", "an", "allocated", "block", "of", "memory", "in", "SDRAM", "." ]
3a3e053d3214899b6d68758685835de0afd5542b
https://github.com/project-rig/rig/blob/3a3e053d3214899b6d68758685835de0afd5542b/rig/machine_control/machine_controller.py#L1173-L1190
train
50,599