Dataset columns:
id: int32 (values 0 to 252k)
repo: string (lengths 7 to 55)
path: string (lengths 4 to 127)
func_name: string (lengths 1 to 88)
original_string: string (lengths 75 to 19.8k)
language: string (1 class)
code: string (lengths 51 to 19.8k)
code_tokens: list
docstring: string (lengths 3 to 17.3k)
docstring_tokens: list
sha: string (length 40)
url: string (lengths 87 to 242)
8,700
reanahub/reana-commons
reana_commons/utils.py
render_cvmfs_pvc
def render_cvmfs_pvc(cvmfs_volume):
    """Render REANA_CVMFS_PVC_TEMPLATE."""
    name = CVMFS_REPOSITORIES[cvmfs_volume]
    rendered_template = dict(REANA_CVMFS_PVC_TEMPLATE)
    rendered_template['metadata']['name'] = 'csi-cvmfs-{}-pvc'.format(name)
    rendered_template['spec']['storageClassName'] = "csi-cvmfs-{}".format(name)
    return rendered_template
python
def render_cvmfs_pvc(cvmfs_volume):
    name = CVMFS_REPOSITORIES[cvmfs_volume]
    rendered_template = dict(REANA_CVMFS_PVC_TEMPLATE)
    rendered_template['metadata']['name'] = 'csi-cvmfs-{}-pvc'.format(name)
    rendered_template['spec']['storageClassName'] = "csi-cvmfs-{}".format(name)
    return rendered_template
[ "def", "render_cvmfs_pvc", "(", "cvmfs_volume", ")", ":", "name", "=", "CVMFS_REPOSITORIES", "[", "cvmfs_volume", "]", "rendered_template", "=", "dict", "(", "REANA_CVMFS_PVC_TEMPLATE", ")", "rendered_template", "[", "'metadata'", "]", "[", "'name'", "]", "=", "'c...
Render REANA_CVMFS_PVC_TEMPLATE.
[ "Render", "REANA_CVMFS_PVC_TEMPLATE", "." ]
abf31d9f495e0d93171c43fc4a414cd292091b11
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/utils.py#L201-L207
8,701
reanahub/reana-commons
reana_commons/utils.py
render_cvmfs_sc
def render_cvmfs_sc(cvmfs_volume):
    """Render REANA_CVMFS_SC_TEMPLATE."""
    name = CVMFS_REPOSITORIES[cvmfs_volume]
    rendered_template = dict(REANA_CVMFS_SC_TEMPLATE)
    rendered_template['metadata']['name'] = "csi-cvmfs-{}".format(name)
    rendered_template['parameters']['repository'] = cvmfs_volume
    return rendered_template
python
def render_cvmfs_sc(cvmfs_volume):
    name = CVMFS_REPOSITORIES[cvmfs_volume]
    rendered_template = dict(REANA_CVMFS_SC_TEMPLATE)
    rendered_template['metadata']['name'] = "csi-cvmfs-{}".format(name)
    rendered_template['parameters']['repository'] = cvmfs_volume
    return rendered_template
[ "def", "render_cvmfs_sc", "(", "cvmfs_volume", ")", ":", "name", "=", "CVMFS_REPOSITORIES", "[", "cvmfs_volume", "]", "rendered_template", "=", "dict", "(", "REANA_CVMFS_SC_TEMPLATE", ")", "rendered_template", "[", "'metadata'", "]", "[", "'name'", "]", "=", "\"cs...
Render REANA_CVMFS_SC_TEMPLATE.
[ "Render", "REANA_CVMFS_SC_TEMPLATE", "." ]
abf31d9f495e0d93171c43fc4a414cd292091b11
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/utils.py#L210-L216
8,702
reanahub/reana-commons
reana_commons/utils.py
create_cvmfs_storage_class
def create_cvmfs_storage_class(cvmfs_volume):
    """Create CVMFS storage class."""
    from kubernetes.client.rest import ApiException
    from reana_commons.k8s.api_client import current_k8s_storagev1_api_client
    try:
        current_k8s_storagev1_api_client.\
            create_storage_class(
                render_cvmfs_sc(cvmfs_volume)
            )
    except ApiException as e:
        if e.status != 409:
            raise e
python
def create_cvmfs_storage_class(cvmfs_volume):
    from kubernetes.client.rest import ApiException
    from reana_commons.k8s.api_client import current_k8s_storagev1_api_client
    try:
        current_k8s_storagev1_api_client.\
            create_storage_class(
                render_cvmfs_sc(cvmfs_volume)
            )
    except ApiException as e:
        if e.status != 409:
            raise e
[ "def", "create_cvmfs_storage_class", "(", "cvmfs_volume", ")", ":", "from", "kubernetes", ".", "client", ".", "rest", "import", "ApiException", "from", "reana_commons", ".", "k8s", ".", "api_client", "import", "current_k8s_storagev1_api_client", "try", ":", "current_k...
Create CVMFS storage class.
[ "Create", "CVMFS", "storage", "class", "." ]
abf31d9f495e0d93171c43fc4a414cd292091b11
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/utils.py#L219-L231
8,703
reanahub/reana-commons
reana_commons/utils.py
create_cvmfs_persistent_volume_claim
def create_cvmfs_persistent_volume_claim(cvmfs_volume):
    """Create CVMFS persistent volume claim."""
    from kubernetes.client.rest import ApiException
    from reana_commons.k8s.api_client import current_k8s_corev1_api_client
    try:
        current_k8s_corev1_api_client.\
            create_namespaced_persistent_volume_claim(
                "default",
                render_cvmfs_pvc(cvmfs_volume)
            )
    except ApiException as e:
        if e.status != 409:
            raise e
python
def create_cvmfs_persistent_volume_claim(cvmfs_volume):
    from kubernetes.client.rest import ApiException
    from reana_commons.k8s.api_client import current_k8s_corev1_api_client
    try:
        current_k8s_corev1_api_client.\
            create_namespaced_persistent_volume_claim(
                "default",
                render_cvmfs_pvc(cvmfs_volume)
            )
    except ApiException as e:
        if e.status != 409:
            raise e
[ "def", "create_cvmfs_persistent_volume_claim", "(", "cvmfs_volume", ")", ":", "from", "kubernetes", ".", "client", ".", "rest", "import", "ApiException", "from", "reana_commons", ".", "k8s", ".", "api_client", "import", "current_k8s_corev1_api_client", "try", ":", "cu...
Create CVMFS persistent volume claim.
[ "Create", "CVMFS", "persistent", "volume", "claim", "." ]
abf31d9f495e0d93171c43fc4a414cd292091b11
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/utils.py#L234-L247
8,704
reanahub/reana-commons
reana_commons/k8s/api_client.py
create_api_client
def create_api_client(api='BatchV1'):
    """Create Kubernetes API client using config.

    :param api: String which represents which Kubernetes API to spawn. By
        default BatchV1.
    :returns: Kubernetes python client object for a specific API i.e. BatchV1.
    """
    k8s_config.load_incluster_config()
    api_configuration = client.Configuration()
    api_configuration.verify_ssl = False
    if api == 'extensions/v1beta1':
        api_client = client.ExtensionsV1beta1Api()
    elif api == 'CoreV1':
        api_client = client.CoreV1Api()
    elif api == 'StorageV1':
        api_client = client.StorageV1Api()
    else:
        api_client = client.BatchV1Api()
    return api_client
python
def create_api_client(api='BatchV1'):
    k8s_config.load_incluster_config()
    api_configuration = client.Configuration()
    api_configuration.verify_ssl = False
    if api == 'extensions/v1beta1':
        api_client = client.ExtensionsV1beta1Api()
    elif api == 'CoreV1':
        api_client = client.CoreV1Api()
    elif api == 'StorageV1':
        api_client = client.StorageV1Api()
    else:
        api_client = client.BatchV1Api()
    return api_client
[ "def", "create_api_client", "(", "api", "=", "'BatchV1'", ")", ":", "k8s_config", ".", "load_incluster_config", "(", ")", "api_configuration", "=", "client", ".", "Configuration", "(", ")", "api_configuration", ".", "verify_ssl", "=", "False", "if", "api", "==",...
Create Kubernetes API client using config. :param api: String which represents which Kubernetes API to spawn. By default BatchV1. :returns: Kubernetes python client object for a specific API i.e. BatchV1.
[ "Create", "Kubernetes", "API", "client", "using", "config", "." ]
abf31d9f495e0d93171c43fc4a414cd292091b11
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/k8s/api_client.py#L18-L36
8,705
reanahub/reana-commons
reana_commons/publisher.py
BasePublisher.__error_callback
def __error_callback(self, exception, interval):
    """Execute when there is an error while sending a message.

    :param exception: Exception which has been thrown while trying to
        send the message.
    :param interval: Interval in which the message delivery will be
        retried.
    """
    logging.error('Error while publishing {}'.format(
        exception))
    logging.info('Retry in %s seconds.', interval)
python
def __error_callback(self, exception, interval):
    logging.error('Error while publishing {}'.format(
        exception))
    logging.info('Retry in %s seconds.', interval)
[ "def", "__error_callback", "(", "self", ",", "exception", ",", "interval", ")", ":", "logging", ".", "error", "(", "'Error while publishing {}'", ".", "format", "(", "exception", ")", ")", "logging", ".", "info", "(", "'Retry in %s seconds.'", ",", "interval", ...
Execute when there is an error while sending a message. :param exception: Exception which has been thrown while trying to send the message. :param interval: Interval in which the message delivery will be retried.
[ "Execute", "when", "there", "is", "an", "error", "while", "sending", "a", "message", "." ]
abf31d9f495e0d93171c43fc4a414cd292091b11
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/publisher.py#L53-L63
8,706
reanahub/reana-commons
reana_commons/publisher.py
BasePublisher._publish
def _publish(self, msg):
    """Publish, handling retries, a message in the queue.

    :param msg: Object which represents the message to be sent in
        the queue. Note that this object should be serializable in the
        configured format (by default JSON).
    """
    connection = self._connection.clone()
    publish = connection.ensure(self.producer, self.producer.publish,
                                errback=self.__error_callback,
                                max_retries=MQ_PRODUCER_MAX_RETRIES)
    publish(json.dumps(msg),
            exchange=self._exchange,
            routing_key=self._routing_key,
            declare=[self._queue])
    logging.debug('Publisher: message sent: %s', msg)
python
def _publish(self, msg):
    connection = self._connection.clone()
    publish = connection.ensure(self.producer, self.producer.publish,
                                errback=self.__error_callback,
                                max_retries=MQ_PRODUCER_MAX_RETRIES)
    publish(json.dumps(msg),
            exchange=self._exchange,
            routing_key=self._routing_key,
            declare=[self._queue])
    logging.debug('Publisher: message sent: %s', msg)
[ "def", "_publish", "(", "self", ",", "msg", ")", ":", "connection", "=", "self", ".", "_connection", ".", "clone", "(", ")", "publish", "=", "connection", ".", "ensure", "(", "self", ".", "producer", ",", "self", ".", "producer", ".", "publish", ",", ...
Publish, handling retries, a message in the queue. :param msg: Object which represents the message to be sent in the queue. Note that this object should be serializable in the configured format (by default JSON).
[ "Publish", "handling", "retries", "a", "message", "in", "the", "queue", "." ]
abf31d9f495e0d93171c43fc4a414cd292091b11
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/publisher.py#L65-L78
8,707
reanahub/reana-commons
reana_commons/publisher.py
WorkflowStatusPublisher.publish_workflow_status
def publish_workflow_status(self, workflow_uuid, status,
                            logs='', message=None):
    """Publish workflow status using the configured.

    :param workflow_uudid: String which represents the workflow UUID.
    :param status: Integer which represents the status of the workflow,
        this is defined in the `reana-db` `Workflow` models.
    :param logs: String which represents the logs which the workflow
        has produced as output.
    :param message: Dictionary which includes additional information
        can be attached such as the overall progress of the workflow.
    """
    msg = {
        "workflow_uuid": workflow_uuid,
        "logs": logs,
        "status": status,
        "message": message
    }
    self._publish(msg)
python
def publish_workflow_status(self, workflow_uuid, status,
                            logs='', message=None):
    msg = {
        "workflow_uuid": workflow_uuid,
        "logs": logs,
        "status": status,
        "message": message
    }
    self._publish(msg)
[ "def", "publish_workflow_status", "(", "self", ",", "workflow_uuid", ",", "status", ",", "logs", "=", "''", ",", "message", "=", "None", ")", ":", "msg", "=", "{", "\"workflow_uuid\"", ":", "workflow_uuid", ",", "\"logs\"", ":", "logs", ",", "\"status\"", ...
Publish workflow status using the configured. :param workflow_uudid: String which represents the workflow UUID. :param status: Integer which represents the status of the workflow, this is defined in the `reana-db` `Workflow` models. :param logs: String which represents the logs which the workflow has produced as output. :param message: Dictionary which includes additional information can be attached such as the overall progress of the workflow.
[ "Publish", "workflow", "status", "using", "the", "configured", "." ]
abf31d9f495e0d93171c43fc4a414cd292091b11
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/publisher.py#L100-L118
8,708
reanahub/reana-commons
reana_commons/publisher.py
WorkflowSubmissionPublisher.publish_workflow_submission
def publish_workflow_submission(self, user_id, workflow_id_or_name,
                                parameters):
    """Publish workflow submission parameters."""
    msg = {
        "user": user_id,
        "workflow_id_or_name": workflow_id_or_name,
        "parameters": parameters
    }
    self._publish(msg)
python
def publish_workflow_submission(self, user_id, workflow_id_or_name,
                                parameters):
    msg = {
        "user": user_id,
        "workflow_id_or_name": workflow_id_or_name,
        "parameters": parameters
    }
    self._publish(msg)
[ "def", "publish_workflow_submission", "(", "self", ",", "user_id", ",", "workflow_id_or_name", ",", "parameters", ")", ":", "msg", "=", "{", "\"user\"", ":", "user_id", ",", "\"workflow_id_or_name\"", ":", "workflow_id_or_name", ",", "\"parameters\"", ":", "paramete...
Publish workflow submission parameters.
[ "Publish", "workflow", "submission", "parameters", "." ]
abf31d9f495e0d93171c43fc4a414cd292091b11
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/publisher.py#L133-L143
8,709
reanahub/reana-commons
reana_commons/serial.py
serial_load
def serial_load(workflow_file, specification, parameters=None, original=None):
    """Validate and return a expanded REANA Serial workflow specification.

    :param workflow_file: A specification file compliant with
        REANA Serial workflow specification.
    :returns: A dictionary which represents the valid Serial workflow with
        all parameters expanded.
    """
    parameters = parameters or {}
    if not specification:
        with open(workflow_file, 'r') as f:
            specification = json.loads(f.read())
    expanded_specification = _expand_parameters(specification,
                                                parameters,
                                                original)
    validate(specification, serial_workflow_schema)
    return expanded_specification
python
def serial_load(workflow_file, specification, parameters=None, original=None):
    parameters = parameters or {}
    if not specification:
        with open(workflow_file, 'r') as f:
            specification = json.loads(f.read())
    expanded_specification = _expand_parameters(specification,
                                                parameters,
                                                original)
    validate(specification, serial_workflow_schema)
    return expanded_specification
[ "def", "serial_load", "(", "workflow_file", ",", "specification", ",", "parameters", "=", "None", ",", "original", "=", "None", ")", ":", "parameters", "=", "parameters", "or", "{", "}", "if", "not", "specification", ":", "with", "open", "(", "workflow_file"...
Validate and return a expanded REANA Serial workflow specification. :param workflow_file: A specification file compliant with REANA Serial workflow specification. :returns: A dictionary which represents the valid Serial workflow with all parameters expanded.
[ "Validate", "and", "return", "a", "expanded", "REANA", "Serial", "workflow", "specification", "." ]
abf31d9f495e0d93171c43fc4a414cd292091b11
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/serial.py#L55-L75
8,710
reanahub/reana-commons
reana_commons/serial.py
_expand_parameters
def _expand_parameters(specification, parameters, original=None):
    """Expand parameters inside comands for Serial workflow specifications.

    :param specification: Full valid Serial workflow specification.
    :param parameters: Parameters to be extended on a Serial specification.
    :param original: Flag which, determins type of specifications to return.
    :returns: If 'original' parameter is set, a copy of the specification
        whithout expanded parametrers will be returned. If 'original' is not
        set, a copy of the specification with expanded parameters (all
        $varname and ${varname} will be expanded with their value). Otherwise
        an error will be thrown if the parameters can not be expanded.
    :raises: jsonschema.ValidationError
    """
    expanded_specification = deepcopy(specification)
    try:
        for step_num, step in enumerate(expanded_specification['steps']):
            current_step = expanded_specification['steps'][step_num]
            for command_num, command in enumerate(step['commands']):
                current_step['commands'][command_num] = \
                    Template(command).substitute(parameters)
        # if call is done from client, original==True and original
        # specifications withtout applied parameters are returned.
        if original:
            return specification
        else:
            return expanded_specification
    except KeyError as e:
        raise ValidationError('Workflow parameter(s) could not '
                              'be expanded. Please take a look '
                              'to {params}'.format(params=str(e)))
python
def _expand_parameters(specification, parameters, original=None):
    expanded_specification = deepcopy(specification)
    try:
        for step_num, step in enumerate(expanded_specification['steps']):
            current_step = expanded_specification['steps'][step_num]
            for command_num, command in enumerate(step['commands']):
                current_step['commands'][command_num] = \
                    Template(command).substitute(parameters)
        # if call is done from client, original==True and original
        # specifications withtout applied parameters are returned.
        if original:
            return specification
        else:
            return expanded_specification
    except KeyError as e:
        raise ValidationError('Workflow parameter(s) could not '
                              'be expanded. Please take a look '
                              'to {params}'.format(params=str(e)))
[ "def", "_expand_parameters", "(", "specification", ",", "parameters", ",", "original", "=", "None", ")", ":", "expanded_specification", "=", "deepcopy", "(", "specification", ")", "try", ":", "for", "step_num", ",", "step", "in", "enumerate", "(", "expanded_spec...
Expand parameters inside comands for Serial workflow specifications. :param specification: Full valid Serial workflow specification. :param parameters: Parameters to be extended on a Serial specification. :param original: Flag which, determins type of specifications to return. :returns: If 'original' parameter is set, a copy of the specification whithout expanded parametrers will be returned. If 'original' is not set, a copy of the specification with expanded parameters (all $varname and ${varname} will be expanded with their value). Otherwise an error will be thrown if the parameters can not be expanded. :raises: jsonschema.ValidationError
[ "Expand", "parameters", "inside", "comands", "for", "Serial", "workflow", "specifications", "." ]
abf31d9f495e0d93171c43fc4a414cd292091b11
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/serial.py#L78-L108
8,711
reanahub/reana-commons
reana_commons/tasks.py
reana_ready
def reana_ready():
    """Check if reana can start new workflows."""
    from reana_commons.config import REANA_READY_CONDITIONS
    for module_name, condition_list in REANA_READY_CONDITIONS.items():
        for condition_name in condition_list:
            module = importlib.import_module(module_name)
            condition_func = getattr(module, condition_name)
            if not condition_func():
                return False
    return True
python
def reana_ready():
    from reana_commons.config import REANA_READY_CONDITIONS
    for module_name, condition_list in REANA_READY_CONDITIONS.items():
        for condition_name in condition_list:
            module = importlib.import_module(module_name)
            condition_func = getattr(module, condition_name)
            if not condition_func():
                return False
    return True
[ "def", "reana_ready", "(", ")", ":", "from", "reana_commons", ".", "config", "import", "REANA_READY_CONDITIONS", "for", "module_name", ",", "condition_list", "in", "REANA_READY_CONDITIONS", ".", "items", "(", ")", ":", "for", "condition_name", "in", "condition_list"...
Check if reana can start new workflows.
[ "Check", "if", "reana", "can", "start", "new", "workflows", "." ]
abf31d9f495e0d93171c43fc4a414cd292091b11
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/tasks.py#L24-L33
8,712
reanahub/reana-commons
reana_commons/tasks.py
check_predefined_conditions
def check_predefined_conditions():
    """Check k8s predefined conditions for the nodes."""
    try:
        node_info = current_k8s_corev1_api_client.list_node()
        for node in node_info.items:
            # check based on the predefined conditions about the
            # node status: MemoryPressure, OutOfDisk, KubeletReady
            # DiskPressure, PIDPressure,
            for condition in node.status.conditions:
                if not condition.status:
                    return False
    except ApiException as e:
        log.error('Something went wrong while getting node information.')
        log.error(e)
        return False
    return True
python
def check_predefined_conditions():
    try:
        node_info = current_k8s_corev1_api_client.list_node()
        for node in node_info.items:
            # check based on the predefined conditions about the
            # node status: MemoryPressure, OutOfDisk, KubeletReady
            # DiskPressure, PIDPressure,
            for condition in node.status.conditions:
                if not condition.status:
                    return False
    except ApiException as e:
        log.error('Something went wrong while getting node information.')
        log.error(e)
        return False
    return True
[ "def", "check_predefined_conditions", "(", ")", ":", "try", ":", "node_info", "=", "current_k8s_corev1_api_client", ".", "list_node", "(", ")", "for", "node", "in", "node_info", ".", "items", ":", "# check based on the predefined conditions about the", "# node status: Mem...
Check k8s predefined conditions for the nodes.
[ "Check", "k8s", "predefined", "conditions", "for", "the", "nodes", "." ]
abf31d9f495e0d93171c43fc4a414cd292091b11
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/tasks.py#L36-L51
8,713
reanahub/reana-commons
reana_commons/tasks.py
check_running_job_count
def check_running_job_count():
    """Check upper limit on running jobs."""
    try:
        job_list = current_k8s_batchv1_api_client.\
            list_job_for_all_namespaces()
        if len(job_list.items) > K8S_MAXIMUM_CONCURRENT_JOBS:
            return False
    except ApiException as e:
        log.error('Something went wrong while getting running job list.')
        log.error(e)
        return False
    return True
python
def check_running_job_count():
    try:
        job_list = current_k8s_batchv1_api_client.\
            list_job_for_all_namespaces()
        if len(job_list.items) > K8S_MAXIMUM_CONCURRENT_JOBS:
            return False
    except ApiException as e:
        log.error('Something went wrong while getting running job list.')
        log.error(e)
        return False
    return True
[ "def", "check_running_job_count", "(", ")", ":", "try", ":", "job_list", "=", "current_k8s_batchv1_api_client", ".", "list_job_for_all_namespaces", "(", ")", "if", "len", "(", "job_list", ".", "items", ")", ">", "K8S_MAXIMUM_CONCURRENT_JOBS", ":", "return", "False",...
Check upper limit on running jobs.
[ "Check", "upper", "limit", "on", "running", "jobs", "." ]
abf31d9f495e0d93171c43fc4a414cd292091b11
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/tasks.py#L54-L65
8,714
reanahub/reana-commons
reana_commons/api_client.py
BaseAPIClient._get_spec
def _get_spec(self, spec_file):
    """Get json specification from package data."""
    spec_file_path = os.path.join(
        pkg_resources.
        resource_filename(
            'reana_commons',
            'openapi_specifications'),
        spec_file)
    with open(spec_file_path) as f:
        json_spec = json.load(f)
    return json_spec
python
def _get_spec(self, spec_file):
    spec_file_path = os.path.join(
        pkg_resources.
        resource_filename(
            'reana_commons',
            'openapi_specifications'),
        spec_file)
    with open(spec_file_path) as f:
        json_spec = json.load(f)
    return json_spec
[ "def", "_get_spec", "(", "self", ",", "spec_file", ")", ":", "spec_file_path", "=", "os", ".", "path", ".", "join", "(", "pkg_resources", ".", "resource_filename", "(", "'reana_commons'", ",", "'openapi_specifications'", ")", ",", "spec_file", ")", "with", "op...
Get json specification from package data.
[ "Get", "json", "specification", "from", "package", "data", "." ]
abf31d9f495e0d93171c43fc4a414cd292091b11
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/api_client.py#L46-L57
8,715
reanahub/reana-commons
reana_commons/api_client.py
JobControllerAPIClient.submit
def submit(self, workflow_uuid='', experiment='', image='', cmd='',
           prettified_cmd='', workflow_workspace='', job_name='',
           cvmfs_mounts='false'):
    """Submit a job to RJC API.

    :param name: Name of the job.
    :param experiment: Experiment the job belongs to.
    :param image: Identifier of the Docker image which will run the job.
    :param cmd: String which represents the command to execute. It can be
        modified by the workflow engine i.e. prepending ``cd /some/dir/``.
    :prettified_cmd: Original command submitted by the user.
    :workflow_workspace: Path to the workspace of the workflow.
    :cvmfs_mounts: String with CVMFS volumes to mount in job pods.
    :return: Returns a dict with the ``job_id``.
    """
    job_spec = {
        'experiment': experiment,
        'docker_img': image,
        'cmd': cmd,
        'prettified_cmd': prettified_cmd,
        'env_vars': {},
        'workflow_workspace': workflow_workspace,
        'job_name': job_name,
        'cvmfs_mounts': cvmfs_mounts,
        'workflow_uuid': workflow_uuid
    }
    response, http_response = self._client.jobs.create_job(job=job_spec).\
        result()
    if http_response.status_code == 400:
        raise HTTPBadRequest('Bad request to create a job. Error: {}'.
                             format(http_response.data))
    elif http_response.status_code == 500:
        raise HTTPInternalServerError('Internal Server Error. Error: {}'.
                                      format(http_response.data))
    return response
python
def submit(self, workflow_uuid='', experiment='', image='', cmd='',
           prettified_cmd='', workflow_workspace='', job_name='',
           cvmfs_mounts='false'):
    job_spec = {
        'experiment': experiment,
        'docker_img': image,
        'cmd': cmd,
        'prettified_cmd': prettified_cmd,
        'env_vars': {},
        'workflow_workspace': workflow_workspace,
        'job_name': job_name,
        'cvmfs_mounts': cvmfs_mounts,
        'workflow_uuid': workflow_uuid
    }
    response, http_response = self._client.jobs.create_job(job=job_spec).\
        result()
    if http_response.status_code == 400:
        raise HTTPBadRequest('Bad request to create a job. Error: {}'.
                             format(http_response.data))
    elif http_response.status_code == 500:
        raise HTTPInternalServerError('Internal Server Error. Error: {}'.
                                      format(http_response.data))
    return response
[ "def", "submit", "(", "self", ",", "workflow_uuid", "=", "''", ",", "experiment", "=", "''", ",", "image", "=", "''", ",", "cmd", "=", "''", ",", "prettified_cmd", "=", "''", ",", "workflow_workspace", "=", "''", ",", "job_name", "=", "''", ",", "cvm...
Submit a job to RJC API. :param name: Name of the job. :param experiment: Experiment the job belongs to. :param image: Identifier of the Docker image which will run the job. :param cmd: String which represents the command to execute. It can be modified by the workflow engine i.e. prepending ``cd /some/dir/``. :prettified_cmd: Original command submitted by the user. :workflow_workspace: Path to the workspace of the workflow. :cvmfs_mounts: String with CVMFS volumes to mount in job pods. :return: Returns a dict with the ``job_id``.
[ "Submit", "a", "job", "to", "RJC", "API", "." ]
abf31d9f495e0d93171c43fc4a414cd292091b11
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/api_client.py#L63-L104
8,716
reanahub/reana-commons
reana_commons/api_client.py
JobControllerAPIClient.check_status
def check_status(self, job_id):
    """Check status of a job."""
    response, http_response = self._client.jobs.get_job(job_id=job_id).\
        result()
    if http_response.status_code == 404:
        raise HTTPNotFound('The given job ID was not found. Error: {}'.
                           format(http_response.data))
    return response
python
def check_status(self, job_id):
    response, http_response = self._client.jobs.get_job(job_id=job_id).\
        result()
    if http_response.status_code == 404:
        raise HTTPNotFound('The given job ID was not found. Error: {}'.
                           format(http_response.data))
    return response
[ "def", "check_status", "(", "self", ",", "job_id", ")", ":", "response", ",", "http_response", "=", "self", ".", "_client", ".", "jobs", ".", "get_job", "(", "job_id", "=", "job_id", ")", ".", "result", "(", ")", "if", "http_response", ".", "status_code"...
Check status of a job.
[ "Check", "status", "of", "a", "job", "." ]
abf31d9f495e0d93171c43fc4a414cd292091b11
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/api_client.py#L106-L113
8,717
reanahub/reana-commons
reana_commons/api_client.py
JobControllerAPIClient.get_logs
def get_logs(self, job_id):
    """Get logs of a job."""
    response, http_response = self._client.jobs.get_logs(job_id=job_id).\
        result()
    if http_response.status_code == 404:
        raise HTTPNotFound('The given job ID was not found. Error: {}'.
                           format(http_response.data))
    return http_response.text
python
def get_logs(self, job_id):
    response, http_response = self._client.jobs.get_logs(job_id=job_id).\
        result()
    if http_response.status_code == 404:
        raise HTTPNotFound('The given job ID was not found. Error: {}'.
                           format(http_response.data))
    return http_response.text
[ "def", "get_logs", "(", "self", ",", "job_id", ")", ":", "response", ",", "http_response", "=", "self", ".", "_client", ".", "jobs", ".", "get_logs", "(", "job_id", "=", "job_id", ")", ".", "result", "(", ")", "if", "http_response", ".", "status_code", ...
Get logs of a job.
[ "Get", "logs", "of", "a", "job", "." ]
abf31d9f495e0d93171c43fc4a414cd292091b11
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/api_client.py#L115-L122
8,718
reanahub/reana-commons
reana_commons/api_client.py
JobControllerAPIClient.check_if_cached
def check_if_cached(self, job_spec, step, workflow_workspace):
    """Check if job result is in cache."""
    response, http_response = self._client.job_cache.check_if_cached(
        job_spec=json.dumps(job_spec),
        workflow_json=json.dumps(step),
        workflow_workspace=workflow_workspace).\
        result()
    if http_response.status_code == 400:
        raise HTTPBadRequest('Bad request to check cache. Error: {}'.
                             format(http_response.data))
    elif http_response.status_code == 500:
        raise HTTPInternalServerError('Internal Server Error. Error: {}'.
                                      format(http_response.data))
    return http_response
python
def check_if_cached(self, job_spec, step, workflow_workspace):
    response, http_response = self._client.job_cache.check_if_cached(
        job_spec=json.dumps(job_spec),
        workflow_json=json.dumps(step),
        workflow_workspace=workflow_workspace).\
        result()
    if http_response.status_code == 400:
        raise HTTPBadRequest('Bad request to check cache. Error: {}'.
                             format(http_response.data))
    elif http_response.status_code == 500:
        raise HTTPInternalServerError('Internal Server Error. Error: {}'.
                                      format(http_response.data))
    return http_response
[ "def", "check_if_cached", "(", "self", ",", "job_spec", ",", "step", ",", "workflow_workspace", ")", ":", "response", ",", "http_response", "=", "self", ".", "_client", ".", "job_cache", ".", "check_if_cached", "(", "job_spec", "=", "json", ".", "dumps", "("...
Check if job result is in cache.
[ "Check", "if", "job", "result", "is", "in", "cache", "." ]
abf31d9f495e0d93171c43fc4a414cd292091b11
https://github.com/reanahub/reana-commons/blob/abf31d9f495e0d93171c43fc4a414cd292091b11/reana_commons/api_client.py#L124-L137
8,719
jbaiter/gphoto2-cffi
gphoto2cffi/backend.py
_logging_callback
def _logging_callback(level, domain, message, data):
    """ Callback that outputs libgphoto2's logging message via Python's
    standard logging facilities.

    :param level: libgphoto2 logging level
    :param domain: component the message originates from
    :param message: logging message
    :param data: Other data in the logging record (unused)
    """
    domain = ffi.string(domain).decode()
    message = ffi.string(message).decode()
    logger = LOGGER.getChild(domain)
    if level not in LOG_LEVELS:
        return
    logger.log(LOG_LEVELS[level], message)
python
def _logging_callback(level, domain, message, data):
    domain = ffi.string(domain).decode()
    message = ffi.string(message).decode()
    logger = LOGGER.getChild(domain)
    if level not in LOG_LEVELS:
        return
    logger.log(LOG_LEVELS[level], message)
[ "def", "_logging_callback", "(", "level", ",", "domain", ",", "message", ",", "data", ")", ":", "domain", "=", "ffi", ".", "string", "(", "domain", ")", ".", "decode", "(", ")", "message", "=", "ffi", ".", "string", "(", "message", ")", ".", "decode"...
Callback that outputs libgphoto2's logging message via Python's standard logging facilities. :param level: libgphoto2 logging level :param domain: component the message originates from :param message: logging message :param data: Other data in the logging record (unused)
[ "Callback", "that", "outputs", "libgphoto2", "s", "logging", "message", "via", "Python", "s", "standard", "logging", "facilities", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/backend.py#L75-L90
8,720
jazzband/django-queued-storage
queued_storage/tasks.py
Transfer.run
def run(self, name, cache_key,
        local_path, remote_path,
        local_options, remote_options, **kwargs):
    """
    The main work horse of the transfer task. Calls the transfer
    method with the local and remote storage backends as given
    with the parameters.

    :param name: name of the file to transfer
    :type name: str
    :param local_path: local storage class to transfer from
    :type local_path: str
    :param local_options: options of the local storage class
    :type local_options: dict
    :param remote_path: remote storage class to transfer to
    :type remote_path: str
    :param remote_options: options of the remote storage class
    :type remote_options: dict
    :param cache_key: cache key to set after a successful transfer
    :type cache_key: str
    :rtype: task result
    """
    local = import_attribute(local_path)(**local_options)
    remote = import_attribute(remote_path)(**remote_options)
    result = self.transfer(name, local, remote, **kwargs)

    if result is True:
        cache.set(cache_key, True)
        file_transferred.send(sender=self.__class__,
                              name=name, local=local, remote=remote)
    elif result is False:
        args = [name, cache_key, local_path, remote_path,
                local_options, remote_options]
        self.retry(args=args, kwargs=kwargs)
    else:
        raise ValueError("Task '%s' did not return True/False but %s" %
                         (self.__class__, result))
    return result
python
def run(self, name, cache_key,
        local_path, remote_path,
        local_options, remote_options, **kwargs):
    local = import_attribute(local_path)(**local_options)
    remote = import_attribute(remote_path)(**remote_options)
    result = self.transfer(name, local, remote, **kwargs)

    if result is True:
        cache.set(cache_key, True)
        file_transferred.send(sender=self.__class__,
                              name=name, local=local, remote=remote)
    elif result is False:
        args = [name, cache_key, local_path, remote_path,
                local_options, remote_options]
        self.retry(args=args, kwargs=kwargs)
    else:
        raise ValueError("Task '%s' did not return True/False but %s" %
                         (self.__class__, result))
    return result
[ "def", "run", "(", "self", ",", "name", ",", "cache_key", ",", "local_path", ",", "remote_path", ",", "local_options", ",", "remote_options", ",", "*", "*", "kwargs", ")", ":", "local", "=", "import_attribute", "(", "local_path", ")", "(", "*", "*", "loc...
The main work horse of the transfer task. Calls the transfer method with the local and remote storage backends as given with the parameters. :param name: name of the file to transfer :type name: str :param local_path: local storage class to transfer from :type local_path: str :param local_options: options of the local storage class :type local_options: dict :param remote_path: remote storage class to transfer to :type remote_path: str :param remote_options: options of the remote storage class :type remote_options: dict :param cache_key: cache key to set after a successful transfer :type cache_key: str :rtype: task result
[ "The", "main", "work", "horse", "of", "the", "transfer", "task", ".", "Calls", "the", "transfer", "method", "with", "the", "local", "and", "remote", "storage", "backends", "as", "given", "with", "the", "parameters", "." ]
f8225d88a01ef5ca8001aeb3f7f80818a022a12d
https://github.com/jazzband/django-queued-storage/blob/f8225d88a01ef5ca8001aeb3f7f80818a022a12d/queued_storage/tasks.py#L63-L100
8,721
jazzband/django-queued-storage
queued_storage/tasks.py
Transfer.transfer
def transfer(self, name, local, remote, **kwargs):
    """
    Transfers the file with the given name from the local to the remote
    storage backend.

    :param name: The name of the file to transfer
    :param local: The local storage backend instance
    :param remote: The remote storage backend instance
    :returns: `True` when the transfer succeeded, `False` if not. Retries
        the task when returning `False`
    :rtype: bool
    """
    try:
        remote.save(name, local.open(name))
        return True
    except Exception as e:
        logger.error("Unable to save '%s' to remote storage. "
                     "About to retry." % name)
        logger.exception(e)
        return False
python
def transfer(self, name, local, remote, **kwargs):
    try:
        remote.save(name, local.open(name))
        return True
    except Exception as e:
        logger.error("Unable to save '%s' to remote storage. "
                     "About to retry." % name)
        logger.exception(e)
        return False
[ "def", "transfer", "(", "self", ",", "name", ",", "local", ",", "remote", ",", "*", "*", "kwargs", ")", ":", "try", ":", "remote", ".", "save", "(", "name", ",", "local", ".", "open", "(", "name", ")", ")", "return", "True", "except", "Exception", ...
Transfers the file with the given name from the local to the remote storage backend. :param name: The name of the file to transfer :param local: The local storage backend instance :param remote: The remote storage backend instance :returns: `True` when the transfer succeeded, `False` if not. Retries the task when returning `False` :rtype: bool
[ "Transfers", "the", "file", "with", "the", "given", "name", "from", "the", "local", "to", "the", "remote", "storage", "backend", "." ]
f8225d88a01ef5ca8001aeb3f7f80818a022a12d
https://github.com/jazzband/django-queued-storage/blob/f8225d88a01ef5ca8001aeb3f7f80818a022a12d/queued_storage/tasks.py#L102-L121
8,722
jbaiter/gphoto2-cffi
gphoto2cffi/util.py
get_string
def get_string(cfunc, *args):
    """ Call a C function and return its return value as a Python string.

    :param cfunc: C function to call
    :param args: Arguments to call function with
    :rtype: str
    """
    cstr = get_ctype("const char**", cfunc, *args)
    return backend.ffi.string(cstr).decode() if cstr else None
python
def get_string(cfunc, *args):
    cstr = get_ctype("const char**", cfunc, *args)
    return backend.ffi.string(cstr).decode() if cstr else None
[ "def", "get_string", "(", "cfunc", ",", "*", "args", ")", ":", "cstr", "=", "get_ctype", "(", "\"const char**\"", ",", "cfunc", ",", "*", "args", ")", "return", "backend", ".", "ffi", ".", "string", "(", "cstr", ")", ".", "decode", "(", ")", "if", ...
Call a C function and return its return value as a Python string. :param cfunc: C function to call :param args: Arguments to call function with :rtype: str
[ "Call", "a", "C", "function", "and", "return", "its", "return", "value", "as", "a", "Python", "string", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/util.py#L26-L34
8,723
jbaiter/gphoto2-cffi
gphoto2cffi/util.py
get_ctype
def get_ctype(rtype, cfunc, *args):
    """ Call a C function that takes a pointer as its last argument and
    return the C object that it contains after the function has finished.

    :param rtype: C data type is filled by the function
    :param cfunc: C function to call
    :param args: Arguments to call function with
    :return: A pointer to the specified data type
    """
    val_p = backend.ffi.new(rtype)
    args = args + (val_p,)
    cfunc(*args)
    return val_p[0]
python
def get_ctype(rtype, cfunc, *args):
    val_p = backend.ffi.new(rtype)
    args = args + (val_p,)
    cfunc(*args)
    return val_p[0]
[ "def", "get_ctype", "(", "rtype", ",", "cfunc", ",", "*", "args", ")", ":", "val_p", "=", "backend", ".", "ffi", ".", "new", "(", "rtype", ")", "args", "=", "args", "+", "(", "val_p", ",", ")", "cfunc", "(", "*", "args", ")", "return", "val_p", ...
Call a C function that takes a pointer as its last argument and return the C object that it contains after the function has finished. :param rtype: C data type is filled by the function :param cfunc: C function to call :param args: Arguments to call function with :return: A pointer to the specified data type
[ "Call", "a", "C", "function", "that", "takes", "a", "pointer", "as", "its", "last", "argument", "and", "return", "the", "C", "object", "that", "it", "contains", "after", "the", "function", "has", "finished", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/util.py#L37-L49
8,724
jbaiter/gphoto2-cffi
gphoto2cffi/util.py
new_gp_object
def new_gp_object(typename):
    """ Create an indirect pointer to a GPhoto2 type, call its matching
    constructor function and return the pointer to it.

    :param typename: Name of the type to create.
    :return: A pointer to the specified data type.
    """
    obj_p = backend.ffi.new("{0}**".format(typename))
    backend.CONSTRUCTORS[typename](obj_p)
    return obj_p[0]
python
def new_gp_object(typename):
    obj_p = backend.ffi.new("{0}**".format(typename))
    backend.CONSTRUCTORS[typename](obj_p)
    return obj_p[0]
[ "def", "new_gp_object", "(", "typename", ")", ":", "obj_p", "=", "backend", ".", "ffi", ".", "new", "(", "\"{0}**\"", ".", "format", "(", "typename", ")", ")", "backend", ".", "CONSTRUCTORS", "[", "typename", "]", "(", "obj_p", ")", "return", "obj_p", ...
Create an indirect pointer to a GPhoto2 type, call its matching constructor function and return the pointer to it. :param typename: Name of the type to create. :return: A pointer to the specified data type.
[ "Create", "an", "indirect", "pointer", "to", "a", "GPhoto2", "type", "call", "its", "matching", "constructor", "function", "and", "return", "the", "pointer", "to", "it", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/util.py#L52-L61
8,725
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
get_library_version
def get_library_version():
    """ Get the version number of the underlying gphoto2 library.

    :return: The version
    :rtype: tuple of (major, minor, patch) version numbers
    """
    version_str = ffi.string(lib.gp_library_version(True)[0]).decode()
    return tuple(int(x) for x in version_str.split('.'))
python
def get_library_version():
    version_str = ffi.string(lib.gp_library_version(True)[0]).decode()
    return tuple(int(x) for x in version_str.split('.'))
[ "def", "get_library_version", "(", ")", ":", "version_str", "=", "ffi", ".", "string", "(", "lib", ".", "gp_library_version", "(", "True", ")", "[", "0", "]", ")", ".", "decode", "(", ")", "return", "tuple", "(", "int", "(", "x", ")", "for", "x", "...
Get the version number of the underlying gphoto2 library. :return: The version :rtype: tuple of (major, minor, patch) version numbers
[ "Get", "the", "version", "number", "of", "the", "underlying", "gphoto2", "library", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L23-L30
8,726
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
list_cameras
def list_cameras():
    """ List all attached USB cameras that are supported by libgphoto2.

    :return: All recognized cameras
    :rtype: list of :py:class:`Camera`
    """
    ctx = lib.gp_context_new()
    camlist_p = new_gp_object("CameraList")
    port_list_p = new_gp_object("GPPortInfoList")
    lib.gp_port_info_list_load(port_list_p)
    abilities_list_p = new_gp_object("CameraAbilitiesList")
    lib.gp_abilities_list_load(abilities_list_p, ctx)
    lib.gp_abilities_list_detect(abilities_list_p, port_list_p,
                                 camlist_p, ctx)
    out = []
    for idx in range(lib.gp_list_count(camlist_p)):
        name = get_string(lib.gp_list_get_name, camlist_p, idx)
        value = get_string(lib.gp_list_get_value, camlist_p, idx)
        # Skip iteration if no matches
        matches = re.match(r"usb:(\d+),(\d+)", value)
        if not matches:
            continue
        bus_no, device_no = (int(x) for x in matches.groups())
        abilities = ffi.new("CameraAbilities*")
        ability_idx = lib.gp_abilities_list_lookup_model(
            abilities_list_p, name.encode())
        lib.gp_abilities_list_get_abilities(abilities_list_p, ability_idx,
                                            abilities)
        if abilities.device_type == lib.GP_DEVICE_STILL_CAMERA:
            out.append(Camera(bus_no, device_no, lazy=True,
                              _abilities=abilities))
    lib.gp_list_free(camlist_p)
    lib.gp_port_info_list_free(port_list_p)
    lib.gp_abilities_list_free(abilities_list_p)
    return out
python
def list_cameras():
    ctx = lib.gp_context_new()
    camlist_p = new_gp_object("CameraList")
    port_list_p = new_gp_object("GPPortInfoList")
    lib.gp_port_info_list_load(port_list_p)
    abilities_list_p = new_gp_object("CameraAbilitiesList")
    lib.gp_abilities_list_load(abilities_list_p, ctx)
    lib.gp_abilities_list_detect(abilities_list_p, port_list_p,
                                 camlist_p, ctx)
    out = []
    for idx in range(lib.gp_list_count(camlist_p)):
        name = get_string(lib.gp_list_get_name, camlist_p, idx)
        value = get_string(lib.gp_list_get_value, camlist_p, idx)
        # Skip iteration if no matches
        matches = re.match(r"usb:(\d+),(\d+)", value)
        if not matches:
            continue
        bus_no, device_no = (int(x) for x in matches.groups())
        abilities = ffi.new("CameraAbilities*")
        ability_idx = lib.gp_abilities_list_lookup_model(
            abilities_list_p, name.encode())
        lib.gp_abilities_list_get_abilities(abilities_list_p, ability_idx,
                                            abilities)
        if abilities.device_type == lib.GP_DEVICE_STILL_CAMERA:
            out.append(Camera(bus_no, device_no, lazy=True,
                              _abilities=abilities))
    lib.gp_list_free(camlist_p)
    lib.gp_port_info_list_free(port_list_p)
    lib.gp_abilities_list_free(abilities_list_p)
    return out
[ "def", "list_cameras", "(", ")", ":", "ctx", "=", "lib", ".", "gp_context_new", "(", ")", "camlist_p", "=", "new_gp_object", "(", "\"CameraList\"", ")", "port_list_p", "=", "new_gp_object", "(", "\"GPPortInfoList\"", ")", "lib", ".", "gp_port_info_list_load", "(...
List all attached USB cameras that are supported by libgphoto2. :return: All recognized cameras :rtype: list of :py:class:`Camera`
[ "List", "all", "attached", "USB", "cameras", "that", "are", "supported", "by", "libgphoto2", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L33-L69
8,727
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
supported_cameras
def supported_cameras():
    """ List the names of all cameras supported by libgphoto2, grouped by
    the name of their driver.
    """
    ctx = lib.gp_context_new()
    abilities_list_p = new_gp_object("CameraAbilitiesList")
    lib.gp_abilities_list_load(abilities_list_p, ctx)
    abilities = ffi.new("CameraAbilities*")
    out = []
    for idx in range(lib.gp_abilities_list_count(abilities_list_p)):
        lib.gp_abilities_list_get_abilities(abilities_list_p, idx, abilities)
        if abilities.device_type == lib.GP_DEVICE_STILL_CAMERA:
            libname = os.path.basename(ffi.string(abilities.library)
                                       .decode())
            out.append((ffi.string(abilities.model).decode(), libname))
    lib.gp_abilities_list_free(abilities_list_p)
    key_func = lambda name, driver: driver
    out = sorted(out, key=key_func)
    return {k: tuple(x[0] for x in v)
            for k, v in itertools.groupby(out, key_func)}
    return out
python
def supported_cameras():
    ctx = lib.gp_context_new()
    abilities_list_p = new_gp_object("CameraAbilitiesList")
    lib.gp_abilities_list_load(abilities_list_p, ctx)
    abilities = ffi.new("CameraAbilities*")
    out = []
    for idx in range(lib.gp_abilities_list_count(abilities_list_p)):
        lib.gp_abilities_list_get_abilities(abilities_list_p, idx, abilities)
        if abilities.device_type == lib.GP_DEVICE_STILL_CAMERA:
            libname = os.path.basename(ffi.string(abilities.library)
                                       .decode())
            out.append((ffi.string(abilities.model).decode(), libname))
    lib.gp_abilities_list_free(abilities_list_p)
    key_func = lambda name, driver: driver
    out = sorted(out, key=key_func)
    return {k: tuple(x[0] for x in v)
            for k, v in itertools.groupby(out, key_func)}
    return out
[ "def", "supported_cameras", "(", ")", ":", "ctx", "=", "lib", ".", "gp_context_new", "(", ")", "abilities_list_p", "=", "new_gp_object", "(", "\"CameraAbilitiesList\"", ")", "lib", ".", "gp_abilities_list_load", "(", "abilities_list_p", ",", "ctx", ")", "abilities...
List the names of all cameras supported by libgphoto2, grouped by the name of their driver.
[ "List", "the", "names", "of", "all", "cameras", "supported", "by", "libgphoto2", "grouped", "by", "the", "name", "of", "their", "driver", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L72-L92
8,728
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
VideoCaptureContext.stop
def stop(self):
    """ Stop the capture. """
    self.camera._get_config()['actions']['movie'].set(False)
    self.videofile = self.camera._wait_for_event(
        event_type=lib.GP_EVENT_FILE_ADDED)
    if self._old_captarget != "Memory card":
        self.camera.config['settings']['capturetarget'].set(
            self._old_captarget)
python
def stop(self):
    self.camera._get_config()['actions']['movie'].set(False)
    self.videofile = self.camera._wait_for_event(
        event_type=lib.GP_EVENT_FILE_ADDED)
    if self._old_captarget != "Memory card":
        self.camera.config['settings']['capturetarget'].set(
            self._old_captarget)
[ "def", "stop", "(", "self", ")", ":", "self", ".", "camera", ".", "_get_config", "(", ")", "[", "'actions'", "]", "[", "'movie'", "]", ".", "set", "(", "False", ")", "self", ".", "videofile", "=", "self", ".", "camera", ".", "_wait_for_event", "(", ...
Stop the capture.
[ "Stop", "the", "capture", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L154-L161
8,729
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Directory.path
def path(self):
    """ Absolute path to the directory on the camera's filesystem. """
    if self.parent is None:
        return "/"
    else:
        return os.path.join(self.parent.path, self.name)
python
def path(self):
    if self.parent is None:
        return "/"
    else:
        return os.path.join(self.parent.path, self.name)
[ "def", "path", "(", "self", ")", ":", "if", "self", ".", "parent", "is", "None", ":", "return", "\"/\"", "else", ":", "return", "os", ".", "path", ".", "join", "(", "self", ".", "parent", ".", "path", ",", "self", ".", "name", ")" ]
Absolute path to the directory on the camera's filesystem.
[ "Absolute", "path", "to", "the", "directory", "on", "the", "camera", "s", "filesystem", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L182-L187
8,730
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Directory.supported_operations
def supported_operations(self):
    """ All directory operations supported by the camera. """
    return tuple(op for op in backend.DIR_OPS if self._dir_ops & op)
python
def supported_operations(self):
    return tuple(op for op in backend.DIR_OPS if self._dir_ops & op)
[ "def", "supported_operations", "(", "self", ")", ":", "return", "tuple", "(", "op", "for", "op", "in", "backend", ".", "DIR_OPS", "if", "self", ".", "_dir_ops", "&", "op", ")" ]
All directory operations supported by the camera.
[ "All", "directory", "operations", "supported", "by", "the", "camera", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L190-L192
8,731
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Directory.exists
def exists(self):
    """ Check whether the directory exists on the camera. """
    if self.name in ("", "/") and self.parent is None:
        return True
    else:
        return self in self.parent.directories
python
def exists(self):
    if self.name in ("", "/") and self.parent is None:
        return True
    else:
        return self in self.parent.directories
[ "def", "exists", "(", "self", ")", ":", "if", "self", ".", "name", "in", "(", "\"\"", ",", "\"/\"", ")", "and", "self", ".", "parent", "is", "None", ":", "return", "True", "else", ":", "return", "self", "in", "self", ".", "parent", ".", "directorie...
Check whether the directory exists on the camera.
[ "Check", "whether", "the", "directory", "exists", "on", "the", "camera", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L195-L200
8,732
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Directory.files
def files(self):
    """ Get a generator that yields all files in the directory. """
    filelist_p = new_gp_object("CameraList")
    lib.gp_camera_folder_list_files(self._cam._cam, self.path.encode(),
                                    filelist_p, self._cam._ctx)
    for idx in range(lib.gp_list_count(filelist_p)):
        fname = get_string(lib.gp_list_get_name, filelist_p, idx)
        yield File(name=fname, directory=self, camera=self._cam)
    lib.gp_list_free(filelist_p)
python
def files(self):
    filelist_p = new_gp_object("CameraList")
    lib.gp_camera_folder_list_files(self._cam._cam, self.path.encode(),
                                    filelist_p, self._cam._ctx)
    for idx in range(lib.gp_list_count(filelist_p)):
        fname = get_string(lib.gp_list_get_name, filelist_p, idx)
        yield File(name=fname, directory=self, camera=self._cam)
    lib.gp_list_free(filelist_p)
[ "def", "files", "(", "self", ")", ":", "filelist_p", "=", "new_gp_object", "(", "\"CameraList\"", ")", "lib", ".", "gp_camera_folder_list_files", "(", "self", ".", "_cam", ".", "_cam", ",", "self", ".", "path", ".", "encode", "(", ")", ",", "filelist_p", ...
Get a generator that yields all files in the directory.
[ "Get", "a", "generator", "that", "yields", "all", "files", "in", "the", "directory", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L204-L212
8,733
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Directory.directories
def directories(self):
    """ Get a generator that yields all subdirectories in the directory. """
    dirlist_p = new_gp_object("CameraList")
    lib.gp_camera_folder_list_folders(self._cam._cam, self.path.encode(),
                                      dirlist_p, self._cam._ctx)
    for idx in range(lib.gp_list_count(dirlist_p)):
        name = os.path.join(
            self.path,
            get_string(lib.gp_list_get_name, dirlist_p, idx))
        yield Directory(name=name, parent=self, camera=self._cam)
    lib.gp_list_free(dirlist_p)
python
def directories(self):
    dirlist_p = new_gp_object("CameraList")
    lib.gp_camera_folder_list_folders(self._cam._cam, self.path.encode(),
                                      dirlist_p, self._cam._ctx)
    for idx in range(lib.gp_list_count(dirlist_p)):
        name = os.path.join(
            self.path,
            get_string(lib.gp_list_get_name, dirlist_p, idx))
        yield Directory(name=name, parent=self, camera=self._cam)
    lib.gp_list_free(dirlist_p)
[ "def", "directories", "(", "self", ")", ":", "dirlist_p", "=", "new_gp_object", "(", "\"CameraList\"", ")", "lib", ".", "gp_camera_folder_list_folders", "(", "self", ".", "_cam", ".", "_cam", ",", "self", ".", "path", ".", "encode", "(", ")", ",", "dirlist...
Get a generator that yields all subdirectories in the directory.
[ "Get", "a", "generator", "that", "yields", "all", "subdirectories", "in", "the", "directory", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L216-L226
8,734
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Directory.create
def create(self):
    """ Create the directory. """
    lib.gp_camera_folder_make_dir(
        self._cam._cam, self.parent.path.encode(), self.name.encode(),
        self._cam._ctx)
python
def create(self):
    lib.gp_camera_folder_make_dir(
        self._cam._cam, self.parent.path.encode(), self.name.encode(),
        self._cam._ctx)
[ "def", "create", "(", "self", ")", ":", "lib", ".", "gp_camera_folder_make_dir", "(", "self", ".", "_cam", ".", "_cam", ",", "self", ".", "parent", ".", "path", ".", "encode", "(", ")", ",", "self", ".", "name", ".", "encode", "(", ")", ",", "self"...
Create the directory.
[ "Create", "the", "directory", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L229-L233
8,735
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Directory.remove
def remove(self):
    """ Remove the directory. """
    lib.gp_camera_folder_remove_dir(
        self._cam._cam, self.parent.path.encode(), self.name.encode(),
        self._cam._ctx)
python
def remove(self):
    lib.gp_camera_folder_remove_dir(
        self._cam._cam, self.parent.path.encode(), self.name.encode(),
        self._cam._ctx)
[ "def", "remove", "(", "self", ")", ":", "lib", ".", "gp_camera_folder_remove_dir", "(", "self", ".", "_cam", ".", "_cam", ",", "self", ".", "parent", ".", "path", ".", "encode", "(", ")", ",", "self", ".", "name", ".", "encode", "(", ")", ",", "sel...
Remove the directory.
[ "Remove", "the", "directory", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L236-L240
8,736
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Directory.upload
def upload(self, local_path):
    """ Upload a file to the camera's permanent storage.

    :param local_path: Path to file to copy
    :type local_path: str/unicode
    """
    camerafile_p = ffi.new("CameraFile**")
    with open(local_path, 'rb') as fp:
        lib.gp_file_new_from_fd(camerafile_p, fp.fileno())
        lib.gp_camera_folder_put_file(
            self._cam._cam, self.path.encode() + b"/",
            os.path.basename(local_path).encode(),
            backend.FILE_TYPES['normal'], camerafile_p[0], self._cam.ctx)
python
def upload(self, local_path): camerafile_p = ffi.new("CameraFile**") with open(local_path, 'rb') as fp: lib.gp_file_new_from_fd(camerafile_p, fp.fileno()) lib.gp_camera_folder_put_file( self._cam._cam, self.path.encode() + b"/", os.path.basename(local_path).encode(), backend.FILE_TYPES['normal'], camerafile_p[0], self._cam.ctx)
[ "def", "upload", "(", "self", ",", "local_path", ")", ":", "camerafile_p", "=", "ffi", ".", "new", "(", "\"CameraFile**\"", ")", "with", "open", "(", "local_path", ",", "'rb'", ")", "as", "fp", ":", "lib", ".", "gp_file_new_from_fd", "(", "camerafile_p", ...
Upload a file to the camera's permanent storage. :param local_path: Path to file to copy :type local_path: str/unicode
[ "Upload", "a", "file", "to", "the", "camera", "s", "permanent", "storage", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L243-L256
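A minimal usage sketch for the upload call above, assuming the package is importable as gphoto2cffi.gphoto2 and that Camera() with no arguments grabs the first attached camera; the storage path and file names are illustrative.

from gphoto2cffi import gphoto2 as gp   # assumed import path, matching the module layout above

cam = gp.Camera()                        # assumption: default constructor picks the first camera
# pick a target folder by path; list_all_directories() is shown in record 8,750 below
dest = next(d for d in cam.list_all_directories() if d.path == "/store_00010000")
dest.upload("/tmp/calibration.jpg")      # stored on the camera under the local basename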
8,737
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
File.supported_operations
def supported_operations(self): """ All file operations supported by the camera. """ return tuple(op for op in backend.FILE_OPS if self._operations & op)
python
def supported_operations(self): return tuple(op for op in backend.FILE_OPS if self._operations & op)
[ "def", "supported_operations", "(", "self", ")", ":", "return", "tuple", "(", "op", "for", "op", "in", "backend", ".", "FILE_OPS", "if", "self", ".", "_operations", "&", "op", ")" ]
All file operations supported by the camera.
[ "All", "file", "operations", "supported", "by", "the", "camera", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L277-L279
8,738
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
File.dimensions
def dimensions(self): """ Dimensions of the image. :rtype: :py:class:`ImageDimensions` """ return ImageDimensions(self._info.file.width, self._info.file.height)
python
def dimensions(self): return ImageDimensions(self._info.file.width, self._info.file.height)
[ "def", "dimensions", "(", "self", ")", ":", "return", "ImageDimensions", "(", "self", ".", "_info", ".", "file", ".", "width", ",", "self", ".", "_info", ".", "file", ".", "height", ")" ]
Dimensions of the image. :rtype: :py:class:`ImageDimensions`
[ "Dimensions", "of", "the", "image", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L298-L303
8,739
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
File.permissions
def permissions(self): """ Permissions of the file. Can be "r-" (read-only), "-w" (write-only), "rw" (read-write) or "--" (no rights). :rtype: str """ can_read = self._info.file.permissions & lib.GP_FILE_PERM_READ can_write = self._info.file.permissions & lib.GP_FILE_PERM_DELETE return "{0}{1}".format("r" if can_read else "-", "w" if can_write else "-")
python
def permissions(self): can_read = self._info.file.permissions & lib.GP_FILE_PERM_READ can_write = self._info.file.permissions & lib.GP_FILE_PERM_DELETE return "{0}{1}".format("r" if can_read else "-", "w" if can_write else "-")
[ "def", "permissions", "(", "self", ")", ":", "can_read", "=", "self", ".", "_info", ".", "file", ".", "permissions", "&", "lib", ".", "GP_FILE_PERM_READ", "can_write", "=", "self", ".", "_info", ".", "file", ".", "permissions", "&", "lib", ".", "GP_FILE_...
Permissions of the file. Can be "r-" (read-only), "-w" (write-only), "rw" (read-write) or "--" (no rights). :rtype: str
[ "Permissions", "of", "the", "file", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L306-L317
8,740
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
File.save
def save(self, target_path, ftype='normal'): """ Save file content to a local file. :param target_path: Path to save remote file as. :type target_path: str/unicode :param ftype: Select 'view' on file. :type ftype: str """ camfile_p = ffi.new("CameraFile**") with open(target_path, 'wb') as fp: lib.gp_file_new_from_fd(camfile_p, fp.fileno()) lib.gp_camera_file_get( self._cam._cam, self.directory.path.encode(), self.name.encode(), backend.FILE_TYPES[ftype], camfile_p[0], self._cam._ctx)
python
def save(self, target_path, ftype='normal'): camfile_p = ffi.new("CameraFile**") with open(target_path, 'wb') as fp: lib.gp_file_new_from_fd(camfile_p, fp.fileno()) lib.gp_camera_file_get( self._cam._cam, self.directory.path.encode(), self.name.encode(), backend.FILE_TYPES[ftype], camfile_p[0], self._cam._ctx)
[ "def", "save", "(", "self", ",", "target_path", ",", "ftype", "=", "'normal'", ")", ":", "camfile_p", "=", "ffi", ".", "new", "(", "\"CameraFile**\"", ")", "with", "open", "(", "target_path", ",", "'wb'", ")", "as", "fp", ":", "lib", ".", "gp_file_new_...
Save file content to a local file. :param target_path: Path to save remote file as. :type target_path: str/unicode :param ftype: Select 'view' on file. :type ftype: str
[ "Save", "file", "content", "to", "a", "local", "file", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L328-L342
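A short hedged sketch of copying every file off the camera with save(); the local directory is illustrative and assumed to exist.

import os
from gphoto2cffi import gphoto2 as gp    # assumed import path

cam = gp.Camera()                         # assumption: first attached camera
for f in cam.list_all_files():            # yields File objects (record 8,749 below)
    f.save(os.path.join("/tmp/photos", f.name))   # ftype defaults to 'normal', i.e. the full-size file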
8,741
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
File.get_data
def get_data(self, ftype='normal'): """ Get file content as a bytestring. :param ftype: Select 'view' on file. :type ftype: str :return: File content :rtype: bytes """ camfile_p = ffi.new("CameraFile**") lib.gp_file_new(camfile_p) lib.gp_camera_file_get( self._cam._cam, self.directory.path.encode(), self.name.encode(), backend.FILE_TYPES[ftype], camfile_p[0], self._cam._ctx) data_p = ffi.new("char**") length_p = ffi.new("unsigned long*") lib.gp_file_get_data_and_size(camfile_p[0], data_p, length_p) byt = bytes(ffi.buffer(data_p[0], length_p[0])) # gphoto2 camera files MUST be freed. lib.gp_file_free(camfile_p[0]) # just to be safe. del data_p, length_p, camfile_p return byt
python
def get_data(self, ftype='normal'): camfile_p = ffi.new("CameraFile**") lib.gp_file_new(camfile_p) lib.gp_camera_file_get( self._cam._cam, self.directory.path.encode(), self.name.encode(), backend.FILE_TYPES[ftype], camfile_p[0], self._cam._ctx) data_p = ffi.new("char**") length_p = ffi.new("unsigned long*") lib.gp_file_get_data_and_size(camfile_p[0], data_p, length_p) byt = bytes(ffi.buffer(data_p[0], length_p[0])) # gphoto2 camera files MUST be freed. lib.gp_file_free(camfile_p[0]) # just to be safe. del data_p, length_p, camfile_p return byt
[ "def", "get_data", "(", "self", ",", "ftype", "=", "'normal'", ")", ":", "camfile_p", "=", "ffi", ".", "new", "(", "\"CameraFile**\"", ")", "lib", ".", "gp_file_new", "(", "camfile_p", ")", "lib", ".", "gp_camera_file_get", "(", "self", ".", "_cam", ".",...
Get file content as a bytestring. :param ftype: Select 'view' on file. :type ftype: str :return: File content :rtype: bytes
[ "Get", "file", "content", "as", "a", "bytestring", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L345-L366
8,742
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
File.iter_data
def iter_data(self, chunk_size=2**16, ftype='normal'): """ Get an iterator that yields chunks of the file content. :param chunk_size: Size of yielded chunks in bytes :type chunk_size: int :param ftype: Select 'view' on file. :type ftype: str :return: Iterator """ self._check_type_supported(ftype) buf_p = ffi.new("char[{0}]".format(chunk_size)) size_p = ffi.new("uint64_t*") offset_p = ffi.new("uint64_t*") for chunk_idx in range(int(math.ceil(self.size/chunk_size))): size_p[0] = chunk_size lib.gp_camera_file_read( self._cam._cam, self.directory.path.encode(), self.name.encode(), backend.FILE_TYPES[ftype], offset_p[0], buf_p, size_p, self._cam._ctx) yield ffi.buffer(buf_p, size_p[0])[:]
python
def iter_data(self, chunk_size=2**16, ftype='normal'): self._check_type_supported(ftype) buf_p = ffi.new("char[{0}]".format(chunk_size)) size_p = ffi.new("uint64_t*") offset_p = ffi.new("uint64_t*") for chunk_idx in range(int(math.ceil(self.size/chunk_size))): size_p[0] = chunk_size lib.gp_camera_file_read( self._cam._cam, self.directory.path.encode(), self.name.encode(), backend.FILE_TYPES[ftype], offset_p[0], buf_p, size_p, self._cam._ctx) yield ffi.buffer(buf_p, size_p[0])[:]
[ "def", "iter_data", "(", "self", ",", "chunk_size", "=", "2", "**", "16", ",", "ftype", "=", "'normal'", ")", ":", "self", ".", "_check_type_supported", "(", "ftype", ")", "buf_p", "=", "ffi", ".", "new", "(", "\"char[{0}]\"", ".", "format", "(", "chun...
Get an iterator that yields chunks of the file content. :param chunk_size: Size of yielded chunks in bytes :type chunk_size: int :param ftype: Select 'view' on file. :type ftype: str :return: Iterator
[ "Get", "an", "iterator", "that", "yields", "chunks", "of", "the", "file", "content", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L369-L388
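For large files (videos in particular) iter_data() avoids buffering the whole payload in memory; continuing the sketch above, f is a File obtained from list_all_files().

with open("/tmp/clip.mov", "wb") as out:
    for chunk in f.iter_data(chunk_size=2**16):   # 64 KiB chunks, the default size
        out.write(chunk)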
8,743
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
File.remove
def remove(self): """ Remove file from device. """ lib.gp_camera_file_delete(self._cam._cam, self.directory.path.encode(), self.name.encode(), self._cam._ctx)
python
def remove(self): lib.gp_camera_file_delete(self._cam._cam, self.directory.path.encode(), self.name.encode(), self._cam._ctx)
[ "def", "remove", "(", "self", ")", ":", "lib", ".", "gp_camera_file_delete", "(", "self", ".", "_cam", ".", "_cam", ",", "self", ".", "directory", ".", "path", ".", "encode", "(", ")", ",", "self", ".", "name", ".", "encode", "(", ")", ",", "self",...
Remove file from device.
[ "Remove", "file", "from", "device", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L391-L394
8,744
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
ConfigItem.set
def set(self, value): """ Update value of the option. Only possible for options with :py:attr:`readonly` set to `False`. If :py:attr:`type` is `choice`, the value must be one of the :py:attr:`choices`. If :py:attr:`type` is `range`, the value must be in the range described by :py:attr:`range`. :param value: Value to set """ if self.readonly: raise ValueError("Option is read-only.") val_p = None if self.type == 'selection': if value not in self.choices: raise ValueError("Invalid choice (valid: {0})".format( repr(self.choices))) val_p = ffi.new("const char[]", value.encode()) elif self.type == 'text': if not isinstance(value, basestring): raise ValueError("Value must be a string.") val_p = ffi.new("char**") val_p[0] = ffi.new("char[]", value.encode()) elif self.type == 'range': if value < self.range.min or value > self.range.max: raise ValueError("Value exceeds valid range ({0}-{1}." .format(self.range.min, self.range.max)) if value % self.range.step: raise ValueError("Value can only be changed in steps of {0}." .format(self.range.step)) val_p = ffi.new("float*") val_p[0] = value elif self.type == 'toggle': if not isinstance(value, bool): raise ValueError("Value must be bool.") val_p = ffi.new("int*") val_p[0] = int(value) elif self.type == 'date': val_p = ffi.new("int*") val_p[0] = value lib.gp_widget_set_value(self._widget, val_p) lib.gp_camera_set_config(self._cam._cam, self._root, self._cam._ctx) self.value = value
python
def set(self, value): if self.readonly: raise ValueError("Option is read-only.") val_p = None if self.type == 'selection': if value not in self.choices: raise ValueError("Invalid choice (valid: {0})".format( repr(self.choices))) val_p = ffi.new("const char[]", value.encode()) elif self.type == 'text': if not isinstance(value, basestring): raise ValueError("Value must be a string.") val_p = ffi.new("char**") val_p[0] = ffi.new("char[]", value.encode()) elif self.type == 'range': if value < self.range.min or value > self.range.max: raise ValueError("Value exceeds valid range ({0}-{1}." .format(self.range.min, self.range.max)) if value % self.range.step: raise ValueError("Value can only be changed in steps of {0}." .format(self.range.step)) val_p = ffi.new("float*") val_p[0] = value elif self.type == 'toggle': if not isinstance(value, bool): raise ValueError("Value must be bool.") val_p = ffi.new("int*") val_p[0] = int(value) elif self.type == 'date': val_p = ffi.new("int*") val_p[0] = value lib.gp_widget_set_value(self._widget, val_p) lib.gp_camera_set_config(self._cam._cam, self._root, self._cam._ctx) self.value = value
[ "def", "set", "(", "self", ",", "value", ")", ":", "if", "self", ".", "readonly", ":", "raise", "ValueError", "(", "\"Option is read-only.\"", ")", "val_p", "=", "None", "if", "self", ".", "type", "==", "'selection'", ":", "if", "value", "not", "in", "...
Update value of the option. Only possible for options with :py:attr:`readonly` set to `False`. If :py:attr:`type` is `choice`, the value must be one of the :py:attr:`choices`. If :py:attr:`type` is `range`, the value must be in the range described by :py:attr:`range`. :param value: Value to set
[ "Update", "value", "of", "the", "option", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L468-L511
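A hedged sketch of changing options through ConfigItem.set(); the 'capturetarget' line mirrors what capture() does internally (record 8,751 below), while the ISO line is purely hypothetical since section and option names vary per camera.

cam = gp.Camera()                                              # assumptions as in the earlier sketches
cam.config['settings']['capturetarget'].set("Memory card")     # 'selection' item: value must be in .choices
cam.config['imgsettings']['iso'].set('800')                    # hypothetical section/option; raises ValueError if invalid or read-only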
8,745
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Camera.supported_operations
def supported_operations(self): """ All operations supported by the camera. """ return tuple(op for op in backend.CAM_OPS if self._abilities.operations & op)
python
def supported_operations(self): return tuple(op for op in backend.CAM_OPS if self._abilities.operations & op)
[ "def", "supported_operations", "(", "self", ")", ":", "return", "tuple", "(", "op", "for", "op", "in", "backend", ".", "CAM_OPS", "if", "self", ".", "_abilities", ".", "operations", "&", "op", ")" ]
All operations supported by the camera.
[ "All", "operations", "supported", "by", "the", "camera", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L571-L574
8,746
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Camera.usb_info
def usb_info(self): """ The camera's USB information. """ return UsbInformation(self._abilities.usb_vendor, self._abilities.usb_product, self._abilities.usb_class, self._abilities.usb_subclass, self._abilities.usb_protocol)
python
def usb_info(self): return UsbInformation(self._abilities.usb_vendor, self._abilities.usb_product, self._abilities.usb_class, self._abilities.usb_subclass, self._abilities.usb_protocol)
[ "def", "usb_info", "(", "self", ")", ":", "return", "UsbInformation", "(", "self", ".", "_abilities", ".", "usb_vendor", ",", "self", ".", "_abilities", ".", "usb_product", ",", "self", ".", "_abilities", ".", "usb_class", ",", "self", ".", "_abilities", "...
The camera's USB information.
[ "The", "camera", "s", "USB", "information", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L577-L583
8,747
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Camera.config
def config(self): """ Writeable configuration parameters. :rtype: dict """ config = self._get_config() return {section: {itm.name: itm for itm in config[section].values() if not itm.readonly} for section in config if 'settings' in section or section == 'other'}
python
def config(self): config = self._get_config() return {section: {itm.name: itm for itm in config[section].values() if not itm.readonly} for section in config if 'settings' in section or section == 'other'}
[ "def", "config", "(", "self", ")", ":", "config", "=", "self", ".", "_get_config", "(", ")", "return", "{", "section", ":", "{", "itm", ".", "name", ":", "itm", "for", "itm", "in", "config", "[", "section", "]", ".", "values", "(", ")", "if", "no...
Writeable configuration parameters. :rtype: dict
[ "Writeable", "configuration", "parameters", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L591-L600
8,748
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Camera.storage_info
def storage_info(self): """ Information about the camera's storage. """ info_p = ffi.new("CameraStorageInformation**") num_info_p = ffi.new("int*") lib.gp_camera_get_storageinfo(self._cam, info_p, num_info_p, self._ctx) infos = [] for idx in range(num_info_p[0]): out = SimpleNamespace() struc = (info_p[0] + idx) fields = struc.fields if lib.GP_STORAGEINFO_BASE & fields: out.directory = next( (d for d in self.list_all_directories() if d.path == ffi.string(struc.basedir).decode()), None) if lib.GP_STORAGEINFO_LABEL & fields: out.label = ffi.string(struc.label).decode() if lib.GP_STORAGEINFO_DESCRIPTION & fields: out.description = ffi.string(struc.description).decode() if lib.GP_STORAGEINFO_STORAGETYPE & fields: stype = struc.type if lib.GP_STORAGEINFO_ST_FIXED_ROM & stype: out.type = 'fixed_rom' elif lib.GP_STORAGEINFO_ST_REMOVABLE_ROM & stype: out.type = 'removable_rom' elif lib.GP_STORAGEINFO_ST_FIXED_RAM & stype: out.type = 'fixed_ram' elif lib.GP_STORAGEINFO_ST_REMOVABLE_RAM & stype: out.type = 'removable_ram' else: out.type = 'unknown' if lib.GP_STORAGEINFO_ACCESS & fields: if lib.GP_STORAGEINFO_AC_READWRITE & struc.access: out.access = 'read-write' elif lib.GP_STORAGEINFO_AC_READONLY & struc.access: out.access = 'read-only' elif lib.GP_STORAGEINFO_AC_READONLY_WITH_DELETE & struc.access: out.access = 'read-delete' if lib.GP_STORAGEINFO_MAXCAPACITY & fields: out.capacity = int(struc.capacitykbytes) if lib.GP_STORAGEINFO_FREESPACEKBYTES & fields: out.free_space = int(struc.freekbytes) if lib.GP_STORAGEINFO_FREESPACEIMAGES & fields: out.remaining_images = int(struc.freeimages) infos.append(out) return infos
python
def storage_info(self): info_p = ffi.new("CameraStorageInformation**") num_info_p = ffi.new("int*") lib.gp_camera_get_storageinfo(self._cam, info_p, num_info_p, self._ctx) infos = [] for idx in range(num_info_p[0]): out = SimpleNamespace() struc = (info_p[0] + idx) fields = struc.fields if lib.GP_STORAGEINFO_BASE & fields: out.directory = next( (d for d in self.list_all_directories() if d.path == ffi.string(struc.basedir).decode()), None) if lib.GP_STORAGEINFO_LABEL & fields: out.label = ffi.string(struc.label).decode() if lib.GP_STORAGEINFO_DESCRIPTION & fields: out.description = ffi.string(struc.description).decode() if lib.GP_STORAGEINFO_STORAGETYPE & fields: stype = struc.type if lib.GP_STORAGEINFO_ST_FIXED_ROM & stype: out.type = 'fixed_rom' elif lib.GP_STORAGEINFO_ST_REMOVABLE_ROM & stype: out.type = 'removable_rom' elif lib.GP_STORAGEINFO_ST_FIXED_RAM & stype: out.type = 'fixed_ram' elif lib.GP_STORAGEINFO_ST_REMOVABLE_RAM & stype: out.type = 'removable_ram' else: out.type = 'unknown' if lib.GP_STORAGEINFO_ACCESS & fields: if lib.GP_STORAGEINFO_AC_READWRITE & struc.access: out.access = 'read-write' elif lib.GP_STORAGEINFO_AC_READONLY & struc.access: out.access = 'read-only' elif lib.GP_STORAGEINFO_AC_READONLY_WITH_DELETE & struc.access: out.access = 'read-delete' if lib.GP_STORAGEINFO_MAXCAPACITY & fields: out.capacity = int(struc.capacitykbytes) if lib.GP_STORAGEINFO_FREESPACEKBYTES & fields: out.free_space = int(struc.freekbytes) if lib.GP_STORAGEINFO_FREESPACEIMAGES & fields: out.remaining_images = int(struc.freeimages) infos.append(out) return infos
[ "def", "storage_info", "(", "self", ")", ":", "info_p", "=", "ffi", ".", "new", "(", "\"CameraStorageInformation**\"", ")", "num_info_p", "=", "ffi", ".", "new", "(", "\"int*\"", ")", "lib", ".", "gp_camera_get_storageinfo", "(", "self", ".", "_cam", ",", ...
Information about the camera's storage.
[ "Information", "about", "the", "camera", "s", "storage", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L625-L670
8,749
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Camera.list_all_files
def list_all_files(self): """ Utility method that yields all files on the device's file systems. """ def list_files_recursively(directory): f_gen = itertools.chain( directory.files, *tuple(list_files_recursively(d) for d in directory.directories)) for f in f_gen: yield f return list_files_recursively(self.filesystem)
python
def list_all_files(self): def list_files_recursively(directory): f_gen = itertools.chain( directory.files, *tuple(list_files_recursively(d) for d in directory.directories)) for f in f_gen: yield f return list_files_recursively(self.filesystem)
[ "def", "list_all_files", "(", "self", ")", ":", "def", "list_files_recursively", "(", "directory", ")", ":", "f_gen", "=", "itertools", ".", "chain", "(", "directory", ".", "files", ",", "*", "tuple", "(", "list_files_recursively", "(", "d", ")", "for", "d...
Utility method that yields all files on the device's file systems.
[ "Utility", "method", "that", "yields", "all", "files", "on", "the", "device", "s", "file", "systems", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L672-L683
8,750
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Camera.list_all_directories
def list_all_directories(self): """ Utility method that yields all directories on the device's file systems. """ def list_dirs_recursively(directory): if directory == self.filesystem: yield directory d_gen = itertools.chain( directory.directories, *tuple(list_dirs_recursively(d) for d in directory.directories)) for d in d_gen: yield d return list_dirs_recursively(self.filesystem)
python
def list_all_directories(self): def list_dirs_recursively(directory): if directory == self.filesystem: yield directory d_gen = itertools.chain( directory.directories, *tuple(list_dirs_recursively(d) for d in directory.directories)) for d in d_gen: yield d return list_dirs_recursively(self.filesystem)
[ "def", "list_all_directories", "(", "self", ")", ":", "def", "list_dirs_recursively", "(", "directory", ")", ":", "if", "directory", "==", "self", ".", "filesystem", ":", "yield", "directory", "d_gen", "=", "itertools", ".", "chain", "(", "directory", ".", "...
Utility method that yields all directories on the device's file systems.
[ "Utility", "method", "that", "yields", "all", "directories", "on", "the", "device", "s", "file", "systems", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L685-L698
8,751
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Camera.capture
def capture(self, to_camera_storage=False): """ Capture an image. Some cameras (mostly Canon and Nikon) support capturing to internal RAM. On these devices, you have to specify `to_camera_storage` if you want to save the images to the memory card. On devices that do not support saving to RAM, the only difference is that the file is automatically downloaded and deleted when set to `False`. :param to_camera_storage: Save image to the camera's internal storage :type to_camera_storage: bool :return: A :py:class:`File` if `to_camera_storage` was `True`, otherwise the captured image as a bytestring. :rtype: :py:class:`File` or bytes """ target = self.config['settings']['capturetarget'] if to_camera_storage and target.value != "Memory card": target.set("Memory card") elif not to_camera_storage and target.value != "Internal RAM": target.set("Internal RAM") lib.gp_camera_trigger_capture(self._cam, self._ctx) fobj = self._wait_for_event(event_type=lib.GP_EVENT_FILE_ADDED) if to_camera_storage: self._logger.info("File written to storage at {0}.".format(fobj)) return fobj else: data = fobj.get_data() try: fobj.remove() except errors.CameraIOError: # That probably means the file is already gone from RAM, # so nothing to worry about. pass return data
python
def capture(self, to_camera_storage=False): target = self.config['settings']['capturetarget'] if to_camera_storage and target.value != "Memory card": target.set("Memory card") elif not to_camera_storage and target.value != "Internal RAM": target.set("Internal RAM") lib.gp_camera_trigger_capture(self._cam, self._ctx) fobj = self._wait_for_event(event_type=lib.GP_EVENT_FILE_ADDED) if to_camera_storage: self._logger.info("File written to storage at {0}.".format(fobj)) return fobj else: data = fobj.get_data() try: fobj.remove() except errors.CameraIOError: # That probably means the file is already gone from RAM, # so nothing to worry about. pass return data
[ "def", "capture", "(", "self", ",", "to_camera_storage", "=", "False", ")", ":", "target", "=", "self", ".", "config", "[", "'settings'", "]", "[", "'capturetarget'", "]", "if", "to_camera_storage", "and", "target", ".", "value", "!=", "\"Memory card\"", ":"...
Capture an image. Some cameras (mostly Canon and Nikon) support capturing to internal RAM. On these devices, you have to specify `to_camera_storage` if you want to save the images to the memory card. On devices that do not support saving to RAM, the only difference is that the file is automatically downloaded and deleted when set to `False`. :param to_camera_storage: Save image to the camera's internal storage :type to_camera_storage: bool :return: A :py:class:`File` if `to_camera_storage` was `True`, otherwise the captured image as a bytestring. :rtype: :py:class:`File` or bytes
[ "Capture", "an", "image", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L701-L735
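A hedged sketch of both capture modes the docstring describes; file paths are illustrative.

# Snapshot straight to the host: bytes come back and the temporary RAM file is removed.
data = cam.capture()
with open("/tmp/shot.jpg", "wb") as fp:
    fp.write(data)

# Keep the image on the memory card instead: a File object comes back.
fobj = cam.capture(to_camera_storage=True)
fobj.save("/tmp/shot_from_card.jpg")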
8,752
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Camera.capture_video
def capture_video(self, length): """ Capture a video. This always writes to the memory card, since internal RAM is likely to run out of space very quickly. Currently this only works with Nikon cameras. :param length: Length of the video to capture in seconds. :type length: int :return: Video file :rtype: :py:class:`File` """ with self.capture_video_context() as ctx: time.sleep(length) return ctx.videofile
python
def capture_video(self, length): with self.capture_video_context() as ctx: time.sleep(length) return ctx.videofile
[ "def", "capture_video", "(", "self", ",", "length", ")", ":", "with", "self", ".", "capture_video_context", "(", ")", "as", "ctx", ":", "time", ".", "sleep", "(", "length", ")", "return", "ctx", ".", "videofile" ]
Capture a video. This always writes to the memory card, since internal RAM is likely to run out of space very quickly. Currently this only works with Nikon cameras. :param length: Length of the video to capture in seconds. :type length: int :return: Video file :rtype: :py:class:`File`
[ "Capture", "a", "video", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L747-L762
8,753
jbaiter/gphoto2-cffi
gphoto2cffi/gphoto2.py
Camera.get_preview
def get_preview(self): """ Get a preview from the camera's viewport. This will usually be a JPEG image with the dimensions depending on the camera. You will need to call the exit() method manually after you are done capturing a live preview. :return: The preview image as a bytestring :rtype: bytes """ lib.gp_camera_capture_preview(self._cam, self.__camfile_p[0], self._ctx) lib.gp_file_get_data_and_size(self.__camfile_p[0], self.__data_p, self.__length_p) return ffi.buffer(self.__data_p[0], self.__length_p[0])[:]
python
def get_preview(self): lib.gp_camera_capture_preview(self._cam, self.__camfile_p[0], self._ctx) lib.gp_file_get_data_and_size(self.__camfile_p[0], self.__data_p, self.__length_p) return ffi.buffer(self.__data_p[0], self.__length_p[0])[:]
[ "def", "get_preview", "(", "self", ")", ":", "lib", ".", "gp_camera_capture_preview", "(", "self", ".", "_cam", ",", "self", ".", "__camfile_p", "[", "0", "]", ",", "self", ".", "_ctx", ")", "lib", ".", "gp_file_get_data_and_size", "(", "self", ".", "__c...
Get a preview from the camera's viewport. This will usually be a JPEG image with the dimensions depending on the camera. You will need to call the exit() method manually after you are done capturing a live preview. :return: The preview image as a bytestring :rtype: bytes
[ "Get", "a", "preview", "from", "the", "camera", "s", "viewport", "." ]
2876d15a58174bd24613cd4106a3ef0cefd48050
https://github.com/jbaiter/gphoto2-cffi/blob/2876d15a58174bd24613cd4106a3ef0cefd48050/gphoto2cffi/gphoto2.py#L764-L776
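A sketch of grabbing a handful of live-view frames; per the docstring, exit() is called once live view is done (assumed to be the Camera method that releases the device).

frames = [cam.get_preview() for _ in range(10)]        # each frame is typically a small JPEG
for idx, frame in enumerate(frames):
    with open("/tmp/preview_{0:02d}.jpg".format(idx), "wb") as fp:
        fp.write(frame)
cam.exit()                                             # assumption: releases the live-view session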
8,754
jazzband/django-queued-storage
queued_storage/backends.py
QueuedStorage.transfer
def transfer(self, name, cache_key=None): """ Transfers the file with the given name to the remote storage backend by queuing the task. :param name: file name :type name: str :param cache_key: the cache key to set after a successful task run :type cache_key: str :rtype: task result """ if cache_key is None: cache_key = self.get_cache_key(name) return self.task.delay(name, cache_key, self.local_path, self.remote_path, self.local_options, self.remote_options)
python
def transfer(self, name, cache_key=None): if cache_key is None: cache_key = self.get_cache_key(name) return self.task.delay(name, cache_key, self.local_path, self.remote_path, self.local_options, self.remote_options)
[ "def", "transfer", "(", "self", ",", "name", ",", "cache_key", "=", "None", ")", ":", "if", "cache_key", "is", "None", ":", "cache_key", "=", "self", ".", "get_cache_key", "(", "name", ")", "return", "self", ".", "task", ".", "delay", "(", "name", ",...
Transfers the file with the given name to the remote storage backend by queuing the task. :param name: file name :type name: str :param cache_key: the cache key to set after a successful task run :type cache_key: str :rtype: task result
[ "Transfers", "the", "file", "with", "the", "given", "name", "to", "the", "remote", "storage", "backend", "by", "queuing", "the", "task", "." ]
f8225d88a01ef5ca8001aeb3f7f80818a022a12d
https://github.com/jazzband/django-queued-storage/blob/f8225d88a01ef5ca8001aeb3f7f80818a022a12d/queued_storage/backends.py#L206-L221
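A hedged Django sketch of wiring QueuedStorage into a model; the dotted backend paths follow the project's usual pattern, the S3 backend name is an assumption (any remote storage class would do), and transfer() itself is normally triggered for you once the local save completes.

from django.db import models
from queued_storage.backends import QueuedStorage

queued_storage = QueuedStorage(
    local='django.core.files.storage.FileSystemStorage',
    remote='storages.backends.s3boto3.S3Boto3Storage')   # assumption: django-storages backend path

class Document(models.Model):
    attachment = models.FileField(storage=queued_storage, upload_to='docs/')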
8,755
jazzband/django-queued-storage
queued_storage/backends.py
QueuedStorage.get_available_name
def get_available_name(self, name): """ Returns a filename that's free on both the local and remote storage systems, and available for new content to be written to. :param name: file name :type name: str :rtype: str """ local_available_name = self.local.get_available_name(name) remote_available_name = self.remote.get_available_name(name) if remote_available_name > local_available_name: return remote_available_name return local_available_name
python
def get_available_name(self, name): local_available_name = self.local.get_available_name(name) remote_available_name = self.remote.get_available_name(name) if remote_available_name > local_available_name: return remote_available_name return local_available_name
[ "def", "get_available_name", "(", "self", ",", "name", ")", ":", "local_available_name", "=", "self", ".", "local", ".", "get_available_name", "(", "name", ")", "remote_available_name", "=", "self", ".", "remote", ".", "get_available_name", "(", "name", ")", "...
Returns a filename that's free on both the local and remote storage systems, and available for new content to be written to. :param name: file name :type name: str :rtype: str
[ "Returns", "a", "filename", "that", "s", "free", "on", "both", "the", "local", "and", "remote", "storage", "systems", "and", "available", "for", "new", "content", "to", "be", "written", "to", "." ]
f8225d88a01ef5ca8001aeb3f7f80818a022a12d
https://github.com/jazzband/django-queued-storage/blob/f8225d88a01ef5ca8001aeb3f7f80818a022a12d/queued_storage/backends.py#L234-L248
8,756
mongolab/dex
dex/analyzer.py
QueryAnalyzer.generate_query_report
def generate_query_report(self, db_uri, parsed_query, db_name, collection_name): """Generates a comprehensive report on the raw query""" index_analysis = None recommendation = None namespace = parsed_query['ns'] indexStatus = "unknown" index_cache_entry = self._ensure_index_cache(db_uri, db_name, collection_name) query_analysis = self._generate_query_analysis(parsed_query, db_name, collection_name) if ((query_analysis['analyzedFields'] != []) and query_analysis['supported']): index_analysis = self._generate_index_analysis(query_analysis, index_cache_entry['indexes']) indexStatus = index_analysis['indexStatus'] if index_analysis['indexStatus'] != 'full': recommendation = self._generate_recommendation(query_analysis, db_name, collection_name) # a temporary fix to suppress faulty parsing of $regexes. # if the recommendation cannot be re-parsed into yaml, we assume # it is invalid. if not validate_yaml(recommendation['index']): recommendation = None query_analysis['supported'] = False # QUERY REPORT return OrderedDict({ 'queryMask': parsed_query['queryMask'], 'indexStatus': indexStatus, 'parsed': parsed_query, 'namespace': namespace, 'queryAnalysis': query_analysis, 'indexAnalysis': index_analysis, 'recommendation': recommendation })
python
def generate_query_report(self, db_uri, parsed_query, db_name, collection_name): index_analysis = None recommendation = None namespace = parsed_query['ns'] indexStatus = "unknown" index_cache_entry = self._ensure_index_cache(db_uri, db_name, collection_name) query_analysis = self._generate_query_analysis(parsed_query, db_name, collection_name) if ((query_analysis['analyzedFields'] != []) and query_analysis['supported']): index_analysis = self._generate_index_analysis(query_analysis, index_cache_entry['indexes']) indexStatus = index_analysis['indexStatus'] if index_analysis['indexStatus'] != 'full': recommendation = self._generate_recommendation(query_analysis, db_name, collection_name) # a temporary fix to suppress faulty parsing of $regexes. # if the recommendation cannot be re-parsed into yaml, we assume # it is invalid. if not validate_yaml(recommendation['index']): recommendation = None query_analysis['supported'] = False # QUERY REPORT return OrderedDict({ 'queryMask': parsed_query['queryMask'], 'indexStatus': indexStatus, 'parsed': parsed_query, 'namespace': namespace, 'queryAnalysis': query_analysis, 'indexAnalysis': index_analysis, 'recommendation': recommendation })
[ "def", "generate_query_report", "(", "self", ",", "db_uri", ",", "parsed_query", ",", "db_name", ",", "collection_name", ")", ":", "index_analysis", "=", "None", "recommendation", "=", "None", "namespace", "=", "parsed_query", "[", "'ns'", "]", "indexStatus", "=...
Generates a comprehensive report on the raw query
[ "Generates", "a", "comprehensive", "report", "on", "the", "raw", "query" ]
f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/analyzer.py#L47-L88
8,757
mongolab/dex
dex/analyzer.py
QueryAnalyzer._ensure_index_cache
def _ensure_index_cache(self, db_uri, db_name, collection_name): """Adds a collections index entries to the cache if not present""" if not self._check_indexes or db_uri is None: return {'indexes': None} if db_name not in self.get_cache(): self._internal_map[db_name] = {} if collection_name not in self._internal_map[db_name]: indexes = [] try: if self._index_cache_connection is None: self._index_cache_connection = pymongo.MongoClient(db_uri, document_class=OrderedDict, read_preference=pymongo.ReadPreference.PRIMARY_PREFERRED) db = self._index_cache_connection[db_name] indexes = db[collection_name].index_information() except: warning = 'Warning: unable to connect to ' + db_uri + "\n" else: internal_map_entry = {'indexes': indexes} self.get_cache()[db_name][collection_name] = internal_map_entry return self.get_cache()[db_name][collection_name]
python
def _ensure_index_cache(self, db_uri, db_name, collection_name): if not self._check_indexes or db_uri is None: return {'indexes': None} if db_name not in self.get_cache(): self._internal_map[db_name] = {} if collection_name not in self._internal_map[db_name]: indexes = [] try: if self._index_cache_connection is None: self._index_cache_connection = pymongo.MongoClient(db_uri, document_class=OrderedDict, read_preference=pymongo.ReadPreference.PRIMARY_PREFERRED) db = self._index_cache_connection[db_name] indexes = db[collection_name].index_information() except: warning = 'Warning: unable to connect to ' + db_uri + "\n" else: internal_map_entry = {'indexes': indexes} self.get_cache()[db_name][collection_name] = internal_map_entry return self.get_cache()[db_name][collection_name]
[ "def", "_ensure_index_cache", "(", "self", ",", "db_uri", ",", "db_name", ",", "collection_name", ")", ":", "if", "not", "self", ".", "_check_indexes", "or", "db_uri", "is", "None", ":", "return", "{", "'indexes'", ":", "None", "}", "if", "db_name", "not",...
Adds a collections index entries to the cache if not present
[ "Adds", "a", "collections", "index", "entries", "to", "the", "cache", "if", "not", "present" ]
f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/analyzer.py#L91-L112
8,758
mongolab/dex
dex/analyzer.py
QueryAnalyzer._generate_query_analysis
def _generate_query_analysis(self, parsed_query, db_name, collection_name): """Translates a raw query object into a Dex query analysis""" analyzed_fields = [] field_count = 0 supported = True sort_fields = [] query_mask = None if 'command' in parsed_query and parsed_query['command'] not in SUPPORTED_COMMANDS: supported = False else: #if 'orderby' in parsed_query: sort_component = parsed_query['orderby'] if 'orderby' in parsed_query else [] sort_seq = 0 for key in sort_component: sort_field = {'fieldName': key, 'fieldType': SORT_TYPE, 'seq': sort_seq} sort_fields.append(key) analyzed_fields.append(sort_field) field_count += 1 sort_seq += 1 query_component = parsed_query['query'] if 'query' in parsed_query else {} for key in query_component: if key not in sort_fields: field_type = UNSUPPORTED_TYPE if ((key not in UNSUPPORTED_QUERY_OPERATORS) and (key not in COMPOSITE_QUERY_OPERATORS)): try: if query_component[key] == {}: raise nested_field_list = query_component[key].keys() except: field_type = EQUIV_TYPE else: for nested_field in nested_field_list: if ((nested_field in RANGE_QUERY_OPERATORS) and (nested_field not in UNSUPPORTED_QUERY_OPERATORS)): field_type = RANGE_TYPE else: supported = False field_type = UNSUPPORTED_TYPE break if field_type is UNSUPPORTED_TYPE: supported = False analyzed_field = {'fieldName': key, 'fieldType': field_type} analyzed_fields.append(analyzed_field) field_count += 1 query_mask = parsed_query['queryMask'] # QUERY ANALYSIS return OrderedDict({ 'analyzedFields': analyzed_fields, 'fieldCount': field_count, 'supported': supported, 'queryMask': query_mask })
python
def _generate_query_analysis(self, parsed_query, db_name, collection_name): analyzed_fields = [] field_count = 0 supported = True sort_fields = [] query_mask = None if 'command' in parsed_query and parsed_query['command'] not in SUPPORTED_COMMANDS: supported = False else: #if 'orderby' in parsed_query: sort_component = parsed_query['orderby'] if 'orderby' in parsed_query else [] sort_seq = 0 for key in sort_component: sort_field = {'fieldName': key, 'fieldType': SORT_TYPE, 'seq': sort_seq} sort_fields.append(key) analyzed_fields.append(sort_field) field_count += 1 sort_seq += 1 query_component = parsed_query['query'] if 'query' in parsed_query else {} for key in query_component: if key not in sort_fields: field_type = UNSUPPORTED_TYPE if ((key not in UNSUPPORTED_QUERY_OPERATORS) and (key not in COMPOSITE_QUERY_OPERATORS)): try: if query_component[key] == {}: raise nested_field_list = query_component[key].keys() except: field_type = EQUIV_TYPE else: for nested_field in nested_field_list: if ((nested_field in RANGE_QUERY_OPERATORS) and (nested_field not in UNSUPPORTED_QUERY_OPERATORS)): field_type = RANGE_TYPE else: supported = False field_type = UNSUPPORTED_TYPE break if field_type is UNSUPPORTED_TYPE: supported = False analyzed_field = {'fieldName': key, 'fieldType': field_type} analyzed_fields.append(analyzed_field) field_count += 1 query_mask = parsed_query['queryMask'] # QUERY ANALYSIS return OrderedDict({ 'analyzedFields': analyzed_fields, 'fieldCount': field_count, 'supported': supported, 'queryMask': query_mask })
[ "def", "_generate_query_analysis", "(", "self", ",", "parsed_query", ",", "db_name", ",", "collection_name", ")", ":", "analyzed_fields", "=", "[", "]", "field_count", "=", "0", "supported", "=", "True", "sort_fields", "=", "[", "]", "query_mask", "=", "None",...
Translates a raw query object into a Dex query analysis
[ "Translates", "a", "raw", "query", "object", "into", "a", "Dex", "query", "analysis" ]
f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/analyzer.py#L115-L177
8,759
mongolab/dex
dex/analyzer.py
QueryAnalyzer._generate_index_analysis
def _generate_index_analysis(self, query_analysis, indexes): """Compares a query signature to the index cache to identify complete and partial indexes available to the query""" needs_recommendation = True full_indexes = [] partial_indexes = [] coverage = "unknown" if indexes is not None: for index_key in indexes.keys(): index = indexes[index_key] index_report = self._generate_index_report(index, query_analysis) if index_report['supported'] is True: if index_report['coverage'] == 'full': full_indexes.append(index_report) if index_report['idealOrder']: needs_recommendation = False elif index_report['coverage'] == 'partial': partial_indexes.append(index_report) if len(full_indexes) > 0: coverage = "full" elif (len(partial_indexes)) > 0: coverage = "partial" elif query_analysis['supported']: coverage = "none" # INDEX ANALYSIS return OrderedDict([('indexStatus', coverage), ('fullIndexes', full_indexes), ('partialIndexes', partial_indexes)])
python
def _generate_index_analysis(self, query_analysis, indexes): needs_recommendation = True full_indexes = [] partial_indexes = [] coverage = "unknown" if indexes is not None: for index_key in indexes.keys(): index = indexes[index_key] index_report = self._generate_index_report(index, query_analysis) if index_report['supported'] is True: if index_report['coverage'] == 'full': full_indexes.append(index_report) if index_report['idealOrder']: needs_recommendation = False elif index_report['coverage'] == 'partial': partial_indexes.append(index_report) if len(full_indexes) > 0: coverage = "full" elif (len(partial_indexes)) > 0: coverage = "partial" elif query_analysis['supported']: coverage = "none" # INDEX ANALYSIS return OrderedDict([('indexStatus', coverage), ('fullIndexes', full_indexes), ('partialIndexes', partial_indexes)])
[ "def", "_generate_index_analysis", "(", "self", ",", "query_analysis", ",", "indexes", ")", ":", "needs_recommendation", "=", "True", "full_indexes", "=", "[", "]", "partial_indexes", "=", "[", "]", "coverage", "=", "\"unknown\"", "if", "indexes", "is", "not", ...
Compares a query signature to the index cache to identify complete and partial indexes available to the query
[ "Compares", "a", "query", "signature", "to", "the", "index", "cache", "to", "identify", "complete", "and", "partial", "indexes", "available", "to", "the", "query" ]
f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/analyzer.py#L180-L211
8,760
mongolab/dex
dex/analyzer.py
QueryAnalyzer._generate_index_report
def _generate_index_report(self, index, query_analysis): """Analyzes an existing index against the results of query analysis""" all_fields = [] equiv_fields = [] sort_fields = [] range_fields = [] for query_field in query_analysis['analyzedFields']: all_fields.append(query_field['fieldName']) if query_field['fieldType'] is EQUIV_TYPE: equiv_fields.append(query_field['fieldName']) elif query_field['fieldType'] is SORT_TYPE: sort_fields.append(query_field['fieldName']) elif query_field['fieldType'] is RANGE_TYPE: range_fields.append(query_field['fieldName']) max_equiv_seq = len(equiv_fields) max_sort_seq = max_equiv_seq + len(sort_fields) max_range_seq = max_sort_seq + len(range_fields) coverage = 'none' query_fields_covered = 0 query_field_count = query_analysis['fieldCount'] supported = True ideal_order = True for index_field in index['key']: field_name = index_field[0] if index_field[1] == '2d': supported = False break if field_name not in all_fields: break if query_fields_covered == 0: coverage = 'partial' if query_fields_covered < max_equiv_seq: if field_name not in equiv_fields: ideal_order = False elif query_fields_covered < max_sort_seq: if field_name not in sort_fields: ideal_order = False elif query_fields_covered < max_range_seq: if field_name not in range_fields: ideal_order = False query_fields_covered += 1 if query_fields_covered == query_field_count: coverage = 'full' # INDEX REPORT return OrderedDict({ 'coverage': coverage, 'idealOrder': ideal_order, 'queryFieldsCovered': query_fields_covered, 'index': index, 'supported': supported })
python
def _generate_index_report(self, index, query_analysis): all_fields = [] equiv_fields = [] sort_fields = [] range_fields = [] for query_field in query_analysis['analyzedFields']: all_fields.append(query_field['fieldName']) if query_field['fieldType'] is EQUIV_TYPE: equiv_fields.append(query_field['fieldName']) elif query_field['fieldType'] is SORT_TYPE: sort_fields.append(query_field['fieldName']) elif query_field['fieldType'] is RANGE_TYPE: range_fields.append(query_field['fieldName']) max_equiv_seq = len(equiv_fields) max_sort_seq = max_equiv_seq + len(sort_fields) max_range_seq = max_sort_seq + len(range_fields) coverage = 'none' query_fields_covered = 0 query_field_count = query_analysis['fieldCount'] supported = True ideal_order = True for index_field in index['key']: field_name = index_field[0] if index_field[1] == '2d': supported = False break if field_name not in all_fields: break if query_fields_covered == 0: coverage = 'partial' if query_fields_covered < max_equiv_seq: if field_name not in equiv_fields: ideal_order = False elif query_fields_covered < max_sort_seq: if field_name not in sort_fields: ideal_order = False elif query_fields_covered < max_range_seq: if field_name not in range_fields: ideal_order = False query_fields_covered += 1 if query_fields_covered == query_field_count: coverage = 'full' # INDEX REPORT return OrderedDict({ 'coverage': coverage, 'idealOrder': ideal_order, 'queryFieldsCovered': query_fields_covered, 'index': index, 'supported': supported })
[ "def", "_generate_index_report", "(", "self", ",", "index", ",", "query_analysis", ")", ":", "all_fields", "=", "[", "]", "equiv_fields", "=", "[", "]", "sort_fields", "=", "[", "]", "range_fields", "=", "[", "]", "for", "query_field", "in", "query_analysis"...
Analyzes an existing index against the results of query analysis
[ "Analyzes", "an", "existing", "index", "against", "the", "results", "of", "query", "analysis" ]
f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/analyzer.py#L214-L273
8,761
mongolab/dex
dex/analyzer.py
QueryAnalyzer._generate_recommendation
def _generate_recommendation(self, query_analysis, db_name, collection_name): """Generates an ideal query recommendation""" index_rec = '{' for query_field in query_analysis['analyzedFields']: if query_field['fieldType'] is EQUIV_TYPE: if len(index_rec) is not 1: index_rec += ', ' index_rec += '"' + query_field['fieldName'] + '": 1' for query_field in query_analysis['analyzedFields']: if query_field['fieldType'] is SORT_TYPE: if len(index_rec) is not 1: index_rec += ', ' index_rec += '"' + query_field['fieldName'] + '": 1' for query_field in query_analysis['analyzedFields']: if query_field['fieldType'] is RANGE_TYPE: if len(index_rec) is not 1: index_rec += ', ' index_rec += '"' + query_field['fieldName'] + '": 1' index_rec += '}' # RECOMMENDATION return OrderedDict([('index',index_rec), ('shellCommand', self.generate_shell_command(collection_name, index_rec))])
python
def _generate_recommendation(self, query_analysis, db_name, collection_name): index_rec = '{' for query_field in query_analysis['analyzedFields']: if query_field['fieldType'] is EQUIV_TYPE: if len(index_rec) is not 1: index_rec += ', ' index_rec += '"' + query_field['fieldName'] + '": 1' for query_field in query_analysis['analyzedFields']: if query_field['fieldType'] is SORT_TYPE: if len(index_rec) is not 1: index_rec += ', ' index_rec += '"' + query_field['fieldName'] + '": 1' for query_field in query_analysis['analyzedFields']: if query_field['fieldType'] is RANGE_TYPE: if len(index_rec) is not 1: index_rec += ', ' index_rec += '"' + query_field['fieldName'] + '": 1' index_rec += '}' # RECOMMENDATION return OrderedDict([('index',index_rec), ('shellCommand', self.generate_shell_command(collection_name, index_rec))])
[ "def", "_generate_recommendation", "(", "self", ",", "query_analysis", ",", "db_name", ",", "collection_name", ")", ":", "index_rec", "=", "'{'", "for", "query_field", "in", "query_analysis", "[", "'analyzedFields'", "]", ":", "if", "query_field", "[", "'fieldType...
Generates an ideal query recommendation
[ "Generates", "an", "ideal", "query", "recommendation" ]
f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/analyzer.py#L276-L301
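To make the ordering rule concrete: a hypothetical analysis with status as an equivalence field, ts as a sort field and age as a range field would produce the recommendation below (equality first, then sort, then range); the accompanying shell command string comes from generate_shell_command and is not reproduced here.

# index_rec built by the three loops above:
# '{"status": 1, "ts": 1, "age": 1}'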
8,762
mongolab/dex
dex/analyzer.py
ReportAggregation.add_query_occurrence
def add_query_occurrence(self, report): """Adds a report to the report aggregation""" initial_millis = int(report['parsed']['stats']['millis']) mask = report['queryMask'] existing_report = self._get_existing_report(mask, report) if existing_report is not None: self._merge_report(existing_report, report) else: time = None if 'ts' in report['parsed']: time = report['parsed']['ts'] self._reports.append(OrderedDict([ ('namespace', report['namespace']), ('lastSeenDate', time), ('queryMask', mask), ('supported', report['queryAnalysis']['supported']), ('indexStatus', report['indexStatus']), ('recommendation', report['recommendation']), ('stats', OrderedDict([('count', 1), ('totalTimeMillis', initial_millis), ('avgTimeMillis', initial_millis)]))]))
python
def add_query_occurrence(self, report): initial_millis = int(report['parsed']['stats']['millis']) mask = report['queryMask'] existing_report = self._get_existing_report(mask, report) if existing_report is not None: self._merge_report(existing_report, report) else: time = None if 'ts' in report['parsed']: time = report['parsed']['ts'] self._reports.append(OrderedDict([ ('namespace', report['namespace']), ('lastSeenDate', time), ('queryMask', mask), ('supported', report['queryAnalysis']['supported']), ('indexStatus', report['indexStatus']), ('recommendation', report['recommendation']), ('stats', OrderedDict([('count', 1), ('totalTimeMillis', initial_millis), ('avgTimeMillis', initial_millis)]))]))
[ "def", "add_query_occurrence", "(", "self", ",", "report", ")", ":", "initial_millis", "=", "int", "(", "report", "[", "'parsed'", "]", "[", "'stats'", "]", "[", "'millis'", "]", ")", "mask", "=", "report", "[", "'queryMask'", "]", "existing_report", "=", ...
Adds a report to the report aggregation
[ "Adds", "a", "report", "to", "the", "report", "aggregation" ]
f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/analyzer.py#L327-L350
8,763
mongolab/dex
dex/analyzer.py
ReportAggregation.get_reports
def get_reports(self): """Returns a minimized version of the aggregation""" return sorted(self._reports, key=lambda x: x['stats']['totalTimeMillis'], reverse=True)
python
def get_reports(self): return sorted(self._reports, key=lambda x: x['stats']['totalTimeMillis'], reverse=True)
[ "def", "get_reports", "(", "self", ")", ":", "return", "sorted", "(", "self", ".", "_reports", ",", "key", "=", "lambda", "x", ":", "x", "[", "'stats'", "]", "[", "'totalTimeMillis'", "]", ",", "reverse", "=", "True", ")" ]
Returns a minimized version of the aggregation
[ "Returns", "a", "minimized", "version", "of", "the", "aggregation" ]
f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/analyzer.py#L353-L357
8,764
mongolab/dex
dex/analyzer.py
ReportAggregation._get_existing_report
def _get_existing_report(self, mask, report): """Returns the aggregated report that matches report""" for existing_report in self._reports: if existing_report['namespace'] == report['namespace']: if mask == existing_report['queryMask']: return existing_report return None
python
def _get_existing_report(self, mask, report): for existing_report in self._reports: if existing_report['namespace'] == report['namespace']: if mask == existing_report['queryMask']: return existing_report return None
[ "def", "_get_existing_report", "(", "self", ",", "mask", ",", "report", ")", ":", "for", "existing_report", "in", "self", ".", "_reports", ":", "if", "existing_report", "[", "'namespace'", "]", "==", "report", "[", "'namespace'", "]", ":", "if", "mask", "=...
Returns the aggregated report that matches report
[ "Returns", "the", "aggregated", "report", "that", "matches", "report" ]
f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/analyzer.py#L360-L366
8,765
mongolab/dex
dex/analyzer.py
ReportAggregation._merge_report
def _merge_report(self, target, new): """Merges a new report into the target report""" time = None if 'ts' in new['parsed']: time = new['parsed']['ts'] if (target.get('lastSeenDate', None) and time and target['lastSeenDate'] < time): target['lastSeenDate'] = time query_millis = int(new['parsed']['stats']['millis']) target['stats']['totalTimeMillis'] += query_millis target['stats']['count'] += 1 target['stats']['avgTimeMillis'] = target['stats']['totalTimeMillis'] / target['stats']['count']
python
def _merge_report(self, target, new): time = None if 'ts' in new['parsed']: time = new['parsed']['ts'] if (target.get('lastSeenDate', None) and time and target['lastSeenDate'] < time): target['lastSeenDate'] = time query_millis = int(new['parsed']['stats']['millis']) target['stats']['totalTimeMillis'] += query_millis target['stats']['count'] += 1 target['stats']['avgTimeMillis'] = target['stats']['totalTimeMillis'] / target['stats']['count']
[ "def", "_merge_report", "(", "self", ",", "target", ",", "new", ")", ":", "time", "=", "None", "if", "'ts'", "in", "new", "[", "'parsed'", "]", ":", "time", "=", "new", "[", "'parsed'", "]", "[", "'ts'", "]", "if", "(", "target", ".", "get", "(",...
Merges a new report into the target report
[ "Merges", "a", "new", "report", "into", "the", "target", "report" ]
f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/analyzer.py#L369-L383
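A quick arithmetic check of the running statistics kept by _merge_report: suppose the aggregated entry holds count 3 and totalTimeMillis 240 (avgTimeMillis 80) and a new occurrence reports millis 160.

# totalTimeMillis: 240 + 160 = 400
# count:           3 + 1     = 4
# avgTimeMillis:   400 / 4   = 100   (a float under Python 3, floor division under Python 2)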
8,766
mongolab/dex
dex/parsers.py
Parser.parse
def parse(self, input): """Passes input to each QueryLineHandler in use""" query = None for handler in self._line_handlers: try: query = handler.handle(input) except Exception as e: query = None finally: if query is not None: return query return None
python
def parse(self, input): query = None for handler in self._line_handlers: try: query = handler.handle(input) except Exception as e: query = None finally: if query is not None: return query return None
[ "def", "parse", "(", "self", ",", "input", ")", ":", "query", "=", "None", "for", "handler", "in", "self", ".", "_line_handlers", ":", "try", ":", "query", "=", "handler", ".", "handle", "(", "input", ")", "except", "Exception", "as", "e", ":", "quer...
Passes input to each QueryLineHandler in use
[ "Passes", "input", "to", "each", "QueryLineHandler", "in", "use" ]
f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/parsers.py#L67-L78
8,767
mongolab/dex
dex/dex.py
Dex.generate_query_report
def generate_query_report(self, db_uri, query, db_name, collection_name): """Analyzes a single query""" return self._query_analyzer.generate_query_report(db_uri, query, db_name, collection_name)
python
def generate_query_report(self, db_uri, query, db_name, collection_name): return self._query_analyzer.generate_query_report(db_uri, query, db_name, collection_name)
[ "def", "generate_query_report", "(", "self", ",", "db_uri", ",", "query", ",", "db_name", ",", "collection_name", ")", ":", "return", "self", ".", "_query_analyzer", ".", "generate_query_report", "(", "db_uri", ",", "query", ",", "db_name", ",", "collection_name...
Analyzes a single query
[ "Analyzes", "a", "single", "query" ]
f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/dex.py#L77-L82
8,768
mongolab/dex
dex/dex.py
Dex.watch_logfile
def watch_logfile(self, logfile_path): """Analyzes queries from the tail of a given log file""" self._run_stats['logSource'] = logfile_path log_parser = LogParser() # For each new line in the logfile ... output_time = time.time() + WATCH_DISPLAY_REFRESH_SECONDS try: firstLine = True for line in self._tail_file(open(logfile_path), WATCH_INTERVAL_SECONDS): if firstLine: self._run_stats['timeRange']['start'] = get_line_time(line) self._process_query(line, log_parser) self._run_stats['timeRange']['end'] = get_line_time(line) if time.time() >= output_time: self._output_aggregated_report(sys.stderr) output_time = time.time() + WATCH_DISPLAY_REFRESH_SECONDS except KeyboardInterrupt: sys.stderr.write("Interrupt received\n") finally: self._output_aggregated_report(sys.stdout) return 0
python
def watch_logfile(self, logfile_path): self._run_stats['logSource'] = logfile_path log_parser = LogParser() # For each new line in the logfile ... output_time = time.time() + WATCH_DISPLAY_REFRESH_SECONDS try: firstLine = True for line in self._tail_file(open(logfile_path), WATCH_INTERVAL_SECONDS): if firstLine: self._run_stats['timeRange']['start'] = get_line_time(line) self._process_query(line, log_parser) self._run_stats['timeRange']['end'] = get_line_time(line) if time.time() >= output_time: self._output_aggregated_report(sys.stderr) output_time = time.time() + WATCH_DISPLAY_REFRESH_SECONDS except KeyboardInterrupt: sys.stderr.write("Interrupt received\n") finally: self._output_aggregated_report(sys.stdout) return 0
[ "def", "watch_logfile", "(", "self", ",", "logfile_path", ")", ":", "self", ".", "_run_stats", "[", "'logSource'", "]", "=", "logfile_path", "log_parser", "=", "LogParser", "(", ")", "# For each new line in the logfile ...", "output_time", "=", "time", ".", "time"...
Analyzes queries from the tail of a given log file
[ "Analyzes", "queries", "from", "the", "tail", "of", "a", "given", "log", "file" ]
f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/dex.py#L264-L287
8,769
mongolab/dex
dex/dex.py
Dex._tail_file
def _tail_file(self, file, interval): """Tails a file""" file.seek(0,2) while True: where = file.tell() line = file.readline() if not line: time.sleep(interval) file.seek(where) else: yield line
python
def _tail_file(self, file, interval): file.seek(0,2) while True: where = file.tell() line = file.readline() if not line: time.sleep(interval) file.seek(where) else: yield line
[ "def", "_tail_file", "(", "self", ",", "file", ",", "interval", ")", ":", "file", ".", "seek", "(", "0", ",", "2", ")", "while", "True", ":", "where", "=", "file", ".", "tell", "(", ")", "line", "=", "file", ".", "readline", "(", ")", "if", "no...
Tails a file
[ "Tails", "a", "file" ]
f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/dex.py#L316-L326
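A self-contained version of the tail-follow generator above, runnable outside dex; the log path in the commented usage line is a placeholder.

    import time

    def tail_file(path, interval=1.0):
        """Yield lines appended to a file, polling every `interval` seconds."""
        with open(path) as f:
            f.seek(0, 2)               # start at end of file, like tail -f
            while True:
                where = f.tell()
                line = f.readline()
                if not line:
                    time.sleep(interval)
                    f.seek(where)      # rewind past the empty read and retry
                else:
                    yield line

    # for line in tail_file('/var/log/mongodb/mongod.log'):  # placeholder path
    #     print(line, end='')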
8,770
mongolab/dex
dex/dex.py
Dex._tail_profile
def _tail_profile(self, db, interval): """Tails the system.profile collection""" latest_doc = None while latest_doc is None: time.sleep(interval) latest_doc = db['system.profile'].find_one() current_time = latest_doc['ts'] while True: time.sleep(interval) cursor = db['system.profile'].find({'ts': {'$gte': current_time}}).sort('ts', pymongo.ASCENDING) for doc in cursor: current_time = doc['ts'] yield doc
python
def _tail_profile(self, db, interval): latest_doc = None while latest_doc is None: time.sleep(interval) latest_doc = db['system.profile'].find_one() current_time = latest_doc['ts'] while True: time.sleep(interval) cursor = db['system.profile'].find({'ts': {'$gte': current_time}}).sort('ts', pymongo.ASCENDING) for doc in cursor: current_time = doc['ts'] yield doc
[ "def", "_tail_profile", "(", "self", ",", "db", ",", "interval", ")", ":", "latest_doc", "=", "None", "while", "latest_doc", "is", "None", ":", "time", ".", "sleep", "(", "interval", ")", "latest_doc", "=", "db", "[", "'system.profile'", "]", ".", "find_...
Tails the system.profile collection
[ "Tails", "the", "system", ".", "profile", "collection" ]
f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/dex.py#L329-L343
8,771
mongolab/dex
dex/dex.py
Dex._tuplefy_namespace
def _tuplefy_namespace(self, namespace): """Converts a mongodb namespace to a db, collection tuple""" namespace_split = namespace.split('.', 1) if len(namespace_split) is 1: # we treat a single element as a collection name. # this also properly tuplefies '*' namespace_tuple = ('*', namespace_split[0]) elif len(namespace_split) is 2: namespace_tuple = (namespace_split[0],namespace_split[1]) else: return None return namespace_tuple
python
def _tuplefy_namespace(self, namespace): namespace_split = namespace.split('.', 1) if len(namespace_split) is 1: # we treat a single element as a collection name. # this also properly tuplefies '*' namespace_tuple = ('*', namespace_split[0]) elif len(namespace_split) is 2: namespace_tuple = (namespace_split[0],namespace_split[1]) else: return None return namespace_tuple
[ "def", "_tuplefy_namespace", "(", "self", ",", "namespace", ")", ":", "namespace_split", "=", "namespace", ".", "split", "(", "'.'", ",", "1", ")", "if", "len", "(", "namespace_split", ")", "is", "1", ":", "# we treat a single element as a collection name.", "# ...
Converts a mongodb namespace to a db, collection tuple
[ "Converts", "a", "mongodb", "namespace", "to", "a", "db", "collection", "tuple" ]
f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/dex.py#L347-L358
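A quick standalone check of the namespace-splitting behaviour: a full 'db.collection' string becomes a (db, collection) tuple, and a bare name (including '*') is treated as a collection under the '*' wildcard database. The helper below mirrors the logic above but compares lengths with ==, since the `is` comparison against small ints in the original only works by accident of CPython int caching.

    def tuplefy_namespace(namespace):
        parts = namespace.split('.', 1)
        if len(parts) == 1:
            return ('*', parts[0])       # bare name -> collection under any database
        return (parts[0], parts[1])

    print(tuplefy_namespace('mydb.users'))  # ('mydb', 'users')
    print(tuplefy_namespace('users'))       # ('*', 'users')
    print(tuplefy_namespace('*'))           # ('*', '*')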
8,772
mongolab/dex
dex/dex.py
Dex._validate_namespaces
def _validate_namespaces(self, input_namespaces): """Converts a list of db namespaces to a list of namespace tuples, supporting basic commandline wildcards""" output_namespaces = [] if input_namespaces == []: return output_namespaces elif '*' in input_namespaces: if len(input_namespaces) > 1: warning = 'Warning: Multiple namespaces are ' warning += 'ignored when one namespace is "*"\n' sys.stderr.write(warning) return output_namespaces else: for namespace in input_namespaces: if not isinstance(namespace, unicode): namespace = unicode(namespace) namespace_tuple = self._tuplefy_namespace(namespace) if namespace_tuple is None: warning = 'Warning: Invalid namespace ' + namespace warning += ' will be ignored\n' sys.stderr.write(warning) else: if namespace_tuple not in output_namespaces: output_namespaces.append(namespace_tuple) else: warning = 'Warning: Duplicate namespace ' + namespace warning += ' will be ignored\n' sys.stderr.write(warning) return output_namespaces
python
def _validate_namespaces(self, input_namespaces): output_namespaces = [] if input_namespaces == []: return output_namespaces elif '*' in input_namespaces: if len(input_namespaces) > 1: warning = 'Warning: Multiple namespaces are ' warning += 'ignored when one namespace is "*"\n' sys.stderr.write(warning) return output_namespaces else: for namespace in input_namespaces: if not isinstance(namespace, unicode): namespace = unicode(namespace) namespace_tuple = self._tuplefy_namespace(namespace) if namespace_tuple is None: warning = 'Warning: Invalid namespace ' + namespace warning += ' will be ignored\n' sys.stderr.write(warning) else: if namespace_tuple not in output_namespaces: output_namespaces.append(namespace_tuple) else: warning = 'Warning: Duplicate namespace ' + namespace warning += ' will be ignored\n' sys.stderr.write(warning) return output_namespaces
[ "def", "_validate_namespaces", "(", "self", ",", "input_namespaces", ")", ":", "output_namespaces", "=", "[", "]", "if", "input_namespaces", "==", "[", "]", ":", "return", "output_namespaces", "elif", "'*'", "in", "input_namespaces", ":", "if", "len", "(", "in...
Converts a list of db namespaces to a list of namespace tuples, supporting basic commandline wildcards
[ "Converts", "a", "list", "of", "db", "namespaces", "to", "a", "list", "of", "namespace", "tuples", "supporting", "basic", "commandline", "wildcards" ]
f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/dex.py#L362-L390
8,773
mongolab/dex
dex/dex.py
Dex._namespace_requested
def _namespace_requested(self, namespace): """Checks whether the requested_namespaces contain the provided namespace""" if namespace is None: return False namespace_tuple = self._tuplefy_namespace(namespace) if namespace_tuple[0] in IGNORE_DBS: return False elif namespace_tuple[1] in IGNORE_COLLECTIONS: return False else: return self._tuple_requested(namespace_tuple)
python
def _namespace_requested(self, namespace): if namespace is None: return False namespace_tuple = self._tuplefy_namespace(namespace) if namespace_tuple[0] in IGNORE_DBS: return False elif namespace_tuple[1] in IGNORE_COLLECTIONS: return False else: return self._tuple_requested(namespace_tuple)
[ "def", "_namespace_requested", "(", "self", ",", "namespace", ")", ":", "if", "namespace", "is", "None", ":", "return", "False", "namespace_tuple", "=", "self", ".", "_tuplefy_namespace", "(", "namespace", ")", "if", "namespace_tuple", "[", "0", "]", "in", "...
Checks whether the requested_namespaces contain the provided namespace
[ "Checks", "whether", "the", "requested_namespaces", "contain", "the", "provided", "namespace" ]
f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/dex.py#L393-L404
8,774
mongolab/dex
dex/dex.py
Dex._tuple_requested
def _tuple_requested(self, namespace_tuple): """Helper for _namespace_requested. Supports limited wildcards""" if not isinstance(namespace_tuple[0], unicode): encoded_db = unicode(namespace_tuple[0]) else: encoded_db = namespace_tuple[0] if not isinstance(namespace_tuple[1], unicode): encoded_coll = unicode(namespace_tuple[1]) else: encoded_coll = namespace_tuple[1] if namespace_tuple is None: return False elif len(self._requested_namespaces) is 0: return True for requested_namespace in self._requested_namespaces: if ((((requested_namespace[0]) == u'*') or (encoded_db == requested_namespace[0])) and (((requested_namespace[1]) == u'*') or (encoded_coll == requested_namespace[1]))): return True return False
python
def _tuple_requested(self, namespace_tuple): if not isinstance(namespace_tuple[0], unicode): encoded_db = unicode(namespace_tuple[0]) else: encoded_db = namespace_tuple[0] if not isinstance(namespace_tuple[1], unicode): encoded_coll = unicode(namespace_tuple[1]) else: encoded_coll = namespace_tuple[1] if namespace_tuple is None: return False elif len(self._requested_namespaces) is 0: return True for requested_namespace in self._requested_namespaces: if ((((requested_namespace[0]) == u'*') or (encoded_db == requested_namespace[0])) and (((requested_namespace[1]) == u'*') or (encoded_coll == requested_namespace[1]))): return True return False
[ "def", "_tuple_requested", "(", "self", ",", "namespace_tuple", ")", ":", "if", "not", "isinstance", "(", "namespace_tuple", "[", "0", "]", ",", "unicode", ")", ":", "encoded_db", "=", "unicode", "(", "namespace_tuple", "[", "0", "]", ")", "else", ":", "...
Helper for _namespace_requested. Supports limited wildcards
[ "Helper", "for", "_namespace_requested", ".", "Supports", "limited", "wildcards" ]
f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/dex.py#L407-L428
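The wildcard matching in _tuple_requested reduces to: a requested (db, collection) pair matches when each side is either '*' or equal to the candidate, and an empty request list matches everything. A simplified standalone sketch (it drops the original's unicode coercion, which matters only on Python 2):

    def tuple_requested(candidate, requested_namespaces):
        """True if (db, coll) matches any requested pair, with '*' as a wildcard."""
        if candidate is None:
            return False
        if not requested_namespaces:
            return True                  # nothing requested means everything matches
        db, coll = candidate
        for want_db, want_coll in requested_namespaces:
            if want_db in ('*', db) and want_coll in ('*', coll):
                return True
        return False

    print(tuple_requested(('app', 'users'), [('*', 'users')]))     # True
    print(tuple_requested(('app', 'events'), [('app', 'users')]))  # False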
8,775
mongolab/dex
dex/dex.py
Dex._get_requested_databases
def _get_requested_databases(self): """Returns a list of databases requested, not including ignored dbs""" requested_databases = [] if ((self._requested_namespaces is not None) and (self._requested_namespaces != [])): for requested_namespace in self._requested_namespaces: if requested_namespace[0] is '*': return [] elif requested_namespace[0] not in IGNORE_DBS: requested_databases.append(requested_namespace[0]) return requested_databases
python
def _get_requested_databases(self): requested_databases = [] if ((self._requested_namespaces is not None) and (self._requested_namespaces != [])): for requested_namespace in self._requested_namespaces: if requested_namespace[0] is '*': return [] elif requested_namespace[0] not in IGNORE_DBS: requested_databases.append(requested_namespace[0]) return requested_databases
[ "def", "_get_requested_databases", "(", "self", ")", ":", "requested_databases", "=", "[", "]", "if", "(", "(", "self", ".", "_requested_namespaces", "is", "not", "None", ")", "and", "(", "self", ".", "_requested_namespaces", "!=", "[", "]", ")", ")", ":",...
Returns a list of databases requested, not including ignored dbs
[ "Returns", "a", "list", "of", "databases", "requested", "not", "including", "ignored", "dbs" ]
f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa
https://github.com/mongolab/dex/blob/f6dc27321028ef1ffdb3d4b1165fdcce7c8f20aa/dex/dex.py#L431-L441
8,776
napalm-automation-community/napalm-fortios
napalm_fortios/fortios.py
FortiOSDriver.get_config
def get_config(self, retrieve="all"): """get_config implementation for FortiOS.""" get_startup = retrieve == "all" or retrieve == "startup" get_running = retrieve == "all" or retrieve == "running" get_candidate = retrieve == "all" or retrieve == "candidate" if retrieve == "all" or get_running: result = self._execute_command_with_vdom('show') text_result = '\n'.join(result) return { 'startup': u"", 'running': py23_compat.text_type(text_result), 'candidate': u"", } elif get_startup or get_candidate: return { 'startup': u"", 'running': u"", 'candidate': u"", }
python
def get_config(self, retrieve="all"): get_startup = retrieve == "all" or retrieve == "startup" get_running = retrieve == "all" or retrieve == "running" get_candidate = retrieve == "all" or retrieve == "candidate" if retrieve == "all" or get_running: result = self._execute_command_with_vdom('show') text_result = '\n'.join(result) return { 'startup': u"", 'running': py23_compat.text_type(text_result), 'candidate': u"", } elif get_startup or get_candidate: return { 'startup': u"", 'running': u"", 'candidate': u"", }
[ "def", "get_config", "(", "self", ",", "retrieve", "=", "\"all\"", ")", ":", "get_startup", "=", "retrieve", "==", "\"all\"", "or", "retrieve", "==", "\"startup\"", "get_running", "=", "retrieve", "==", "\"all\"", "or", "retrieve", "==", "\"running\"", "get_ca...
get_config implementation for FortiOS.
[ "get_config", "implementation", "for", "FortiOS", "." ]
7cb0723b079ab523211d98751e15bf148a9a69b2
https://github.com/napalm-automation-community/napalm-fortios/blob/7cb0723b079ab523211d98751e15bf148a9a69b2/napalm_fortios/fortios.py#L162-L183
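A hypothetical usage sketch for the method above, assuming the usual NAPALM pattern of obtaining the driver through get_network_driver; the import location, host, and credentials are placeholders/assumptions rather than a verified snippet for this community driver.

    from napalm import get_network_driver   # assumed entry point; older installs expose napalm_base instead

    driver_cls = get_network_driver('fortios')
    device = driver_cls('192.0.2.10', 'admin', 'secret')   # placeholder host and credentials
    device.open()
    try:
        config = device.get_config(retrieve='running')
        print(config['running'][:200])   # 'startup' and 'candidate' come back as empty strings
    finally:
        device.close()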
8,777
JohnDoee/deluge-client
deluge_client/client.py
DelugeRPCClient.connect
def connect(self): """ Connects to the Deluge instance """ self._connect() logger.debug('Connected to Deluge, detecting daemon version') self._detect_deluge_version() logger.debug('Daemon version {} detected, logging in'.format(self.deluge_version)) if self.deluge_version == 2: result = self.call('daemon.login', self.username, self.password, client_version='deluge-client') else: result = self.call('daemon.login', self.username, self.password) logger.debug('Logged in with value %r' % result) self.connected = True
python
def connect(self): self._connect() logger.debug('Connected to Deluge, detecting daemon version') self._detect_deluge_version() logger.debug('Daemon version {} detected, logging in'.format(self.deluge_version)) if self.deluge_version == 2: result = self.call('daemon.login', self.username, self.password, client_version='deluge-client') else: result = self.call('daemon.login', self.username, self.password) logger.debug('Logged in with value %r' % result) self.connected = True
[ "def", "connect", "(", "self", ")", ":", "self", ".", "_connect", "(", ")", "logger", ".", "debug", "(", "'Connected to Deluge, detecting daemon version'", ")", "self", ".", "_detect_deluge_version", "(", ")", "logger", ".", "debug", "(", "'Daemon version {} detec...
Connects to the Deluge instance
[ "Connects", "to", "the", "Deluge", "instance" ]
388512661b0bb2410c78185695ce564703b0e2fe
https://github.com/JohnDoee/deluge-client/blob/388512661b0bb2410c78185695ce564703b0e2fe/deluge_client/client.py#L74-L87
8,778
JohnDoee/deluge-client
deluge_client/client.py
DelugeRPCClient.disconnect
def disconnect(self): """ Disconnect from deluge """ if self.connected: self._socket.close() self._socket = None self.connected = False
python
def disconnect(self): if self.connected: self._socket.close() self._socket = None self.connected = False
[ "def", "disconnect", "(", "self", ")", ":", "if", "self", ".", "connected", ":", "self", ".", "_socket", ".", "close", "(", ")", "self", ".", "_socket", "=", "None", "self", ".", "connected", "=", "False" ]
Disconnect from deluge
[ "Disconnect", "from", "deluge" ]
388512661b0bb2410c78185695ce564703b0e2fe
https://github.com/JohnDoee/deluge-client/blob/388512661b0bb2410c78185695ce564703b0e2fe/deluge_client/client.py#L103-L110
8,779
JohnDoee/deluge-client
deluge_client/client.py
DelugeRPCClient.call
def call(self, method, *args, **kwargs): """ Calls an RPC function """ tried_reconnect = False for _ in range(2): try: self._send_call(self.deluge_version, self.deluge_protocol_version, method, *args, **kwargs) return self._receive_response(self.deluge_version, self.deluge_protocol_version) except (socket.error, ConnectionLostException, CallTimeoutException): if self.automatic_reconnect: if tried_reconnect: raise FailedToReconnectException() else: try: self.reconnect() except (socket.error, ConnectionLostException, CallTimeoutException): raise FailedToReconnectException() tried_reconnect = True else: raise
python
def call(self, method, *args, **kwargs): tried_reconnect = False for _ in range(2): try: self._send_call(self.deluge_version, self.deluge_protocol_version, method, *args, **kwargs) return self._receive_response(self.deluge_version, self.deluge_protocol_version) except (socket.error, ConnectionLostException, CallTimeoutException): if self.automatic_reconnect: if tried_reconnect: raise FailedToReconnectException() else: try: self.reconnect() except (socket.error, ConnectionLostException, CallTimeoutException): raise FailedToReconnectException() tried_reconnect = True else: raise
[ "def", "call", "(", "self", ",", "method", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "tried_reconnect", "=", "False", "for", "_", "in", "range", "(", "2", ")", ":", "try", ":", "self", ".", "_send_call", "(", "self", ".", "deluge_version...
Calls an RPC function
[ "Calls", "an", "RPC", "function" ]
388512661b0bb2410c78185695ce564703b0e2fe
https://github.com/JohnDoee/deluge-client/blob/388512661b0bb2410c78185695ce564703b0e2fe/deluge_client/client.py#L239-L260
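A usage sketch for the client above; the constructor arguments and the core.get_torrents_status RPC follow deluge-client's documented usage, but the host, credentials, and field names here are placeholders/assumptions.

    from deluge_client import DelugeRPCClient

    client = DelugeRPCClient('127.0.0.1', 58846, 'localclient', 'password')  # placeholder credentials
    client.connect()
    try:
        # Standard Deluge core RPC: a filter dict and a list of fields to return.
        torrents = client.call('core.get_torrents_status', {}, ['name', 'progress'])
        for torrent_id, fields in torrents.items():
            print(torrent_id, fields)
    finally:
        client.disconnect()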
8,780
luckydonald/pytgbot
code_generation/output/pytgbot/api_types/receivable/stickers.py
StickerSet.to_array
def to_array(self): """ Serializes this StickerSet to a dictionary. :return: dictionary representation of this object. :rtype: dict """ array = super(StickerSet, self).to_array() array['name'] = u(self.name) # py2: type unicode, py3: type str array['title'] = u(self.title) # py2: type unicode, py3: type str array['contains_masks'] = bool(self.contains_masks) # type bool array['stickers'] = self._as_array(self.stickers) # type list of Sticker return array
python
def to_array(self): array = super(StickerSet, self).to_array() array['name'] = u(self.name) # py2: type unicode, py3: type str array['title'] = u(self.title) # py2: type unicode, py3: type str array['contains_masks'] = bool(self.contains_masks) # type bool array['stickers'] = self._as_array(self.stickers) # type list of Sticker return array
[ "def", "to_array", "(", "self", ")", ":", "array", "=", "super", "(", "StickerSet", ",", "self", ")", ".", "to_array", "(", ")", "array", "[", "'name'", "]", "=", "u", "(", "self", ".", "name", ")", "# py2: type unicode, py3: type str", "array", "[", "...
Serializes this StickerSet to a dictionary. :return: dictionary representation of this object. :rtype: dict
[ "Serializes", "this", "StickerSet", "to", "a", "dictionary", "." ]
67f4b5a1510d4583d40b5477e876b1ef0eb8971b
https://github.com/luckydonald/pytgbot/blob/67f4b5a1510d4583d40b5477e876b1ef0eb8971b/code_generation/output/pytgbot/api_types/receivable/stickers.py#L81-L96
8,781
luckydonald/pytgbot
code_generation/output/pytgbot/api_types/receivable/stickers.py
StickerSet.from_array
def from_array(array): """ Deserialize a new StickerSet from a given dictionary. :return: new StickerSet instance. :rtype: StickerSet """ if array is None or not array: return None # end if assert_type_or_raise(array, dict, parameter_name="array") from pytgbot.api_types.receivable.media import Sticker data = {} data['name'] = u(array.get('name')) data['title'] = u(array.get('title')) data['contains_masks'] = bool(array.get('contains_masks')) data['stickers'] = Sticker.from_array_list(array.get('stickers'), list_level=1) data['_raw'] = array return StickerSet(**data)
python
def from_array(array): if array is None or not array: return None # end if assert_type_or_raise(array, dict, parameter_name="array") from pytgbot.api_types.receivable.media import Sticker data = {} data['name'] = u(array.get('name')) data['title'] = u(array.get('title')) data['contains_masks'] = bool(array.get('contains_masks')) data['stickers'] = Sticker.from_array_list(array.get('stickers'), list_level=1) data['_raw'] = array return StickerSet(**data)
[ "def", "from_array", "(", "array", ")", ":", "if", "array", "is", "None", "or", "not", "array", ":", "return", "None", "# end if", "assert_type_or_raise", "(", "array", ",", "dict", ",", "parameter_name", "=", "\"array\"", ")", "from", "pytgbot", ".", "api...
Deserialize a new StickerSet from a given dictionary. :return: new StickerSet instance. :rtype: StickerSet
[ "Deserialize", "a", "new", "StickerSet", "from", "a", "given", "dictionary", "." ]
67f4b5a1510d4583d40b5477e876b1ef0eb8971b
https://github.com/luckydonald/pytgbot/blob/67f4b5a1510d4583d40b5477e876b1ef0eb8971b/code_generation/output/pytgbot/api_types/receivable/stickers.py#L100-L120
8,782
luckydonald/pytgbot
code_generation/output/pytgbot/api_types/receivable/stickers.py
MaskPosition.to_array
def to_array(self): """ Serializes this MaskPosition to a dictionary. :return: dictionary representation of this object. :rtype: dict """ array = super(MaskPosition, self).to_array() array['point'] = u(self.point) # py2: type unicode, py3: type str array['x_shift'] = float(self.x_shift) # type float array['y_shift'] = float(self.y_shift) # type float array['scale'] = float(self.scale) # type float return array
python
def to_array(self): array = super(MaskPosition, self).to_array() array['point'] = u(self.point) # py2: type unicode, py3: type str array['x_shift'] = float(self.x_shift) # type float array['y_shift'] = float(self.y_shift) # type float array['scale'] = float(self.scale) # type float return array
[ "def", "to_array", "(", "self", ")", ":", "array", "=", "super", "(", "MaskPosition", ",", "self", ")", ".", "to_array", "(", ")", "array", "[", "'point'", "]", "=", "u", "(", "self", ".", "point", ")", "# py2: type unicode, py3: type str", "array", "[",...
Serializes this MaskPosition to a dictionary. :return: dictionary representation of this object. :rtype: dict
[ "Serializes", "this", "MaskPosition", "to", "a", "dictionary", "." ]
67f4b5a1510d4583d40b5477e876b1ef0eb8971b
https://github.com/luckydonald/pytgbot/blob/67f4b5a1510d4583d40b5477e876b1ef0eb8971b/code_generation/output/pytgbot/api_types/receivable/stickers.py#L220-L233
8,783
luckydonald/pytgbot
code_generation/output/pytgbot/api_types/receivable/stickers.py
MaskPosition.from_array
def from_array(array): """ Deserialize a new MaskPosition from a given dictionary. :return: new MaskPosition instance. :rtype: MaskPosition """ if array is None or not array: return None # end if assert_type_or_raise(array, dict, parameter_name="array") data = {} data['point'] = u(array.get('point')) data['x_shift'] = float(array.get('x_shift')) data['y_shift'] = float(array.get('y_shift')) data['scale'] = float(array.get('scale')) data['_raw'] = array return MaskPosition(**data)
python
def from_array(array): if array is None or not array: return None # end if assert_type_or_raise(array, dict, parameter_name="array") data = {} data['point'] = u(array.get('point')) data['x_shift'] = float(array.get('x_shift')) data['y_shift'] = float(array.get('y_shift')) data['scale'] = float(array.get('scale')) data['_raw'] = array return MaskPosition(**data)
[ "def", "from_array", "(", "array", ")", ":", "if", "array", "is", "None", "or", "not", "array", ":", "return", "None", "# end if", "assert_type_or_raise", "(", "array", ",", "dict", ",", "parameter_name", "=", "\"array\"", ")", "data", "=", "{", "}", "da...
Deserialize a new MaskPosition from a given dictionary. :return: new MaskPosition instance. :rtype: MaskPosition
[ "Deserialize", "a", "new", "MaskPosition", "from", "a", "given", "dictionary", "." ]
67f4b5a1510d4583d40b5477e876b1ef0eb8971b
https://github.com/luckydonald/pytgbot/blob/67f4b5a1510d4583d40b5477e876b1ef0eb8971b/code_generation/output/pytgbot/api_types/receivable/stickers.py#L237-L255
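A round-trip sketch for the two MaskPosition methods above. The field values are arbitrary, and the import path is assumed from the generated module layout shown in the record; from_array builds the object via keyword arguments, and to_array serializes the same four keys back out.

    # Import path assumed from the generated pytgbot module layout.
    from pytgbot.api_types.receivable.stickers import MaskPosition

    payload = {'point': 'eyes', 'x_shift': -1.0, 'y_shift': 1.0, 'scale': 2.0}
    mask = MaskPosition.from_array(payload)   # deserialize from a plain dict
    print(mask.to_array())                    # roughly the same four keys and values come back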
8,784
delph-in/pydelphin
delphin/interfaces/ace.py
compile
def compile(cfg_path, out_path, executable=None, env=None, log=None): """ Use ACE to compile a grammar. Args: cfg_path (str): the path to the ACE config file out_path (str): the path where the compiled grammar will be written executable (str, optional): the path to the ACE binary; if `None`, the `ace` command will be used env (dict, optional): environment variables to pass to the ACE subprocess log (file, optional): if given, the file, opened for writing, or stream to write ACE's stdout and stderr compile messages """ try: check_call( [(executable or 'ace'), '-g', cfg_path, '-G', out_path], stdout=log, stderr=log, close_fds=True, env=(env or os.environ) ) except (CalledProcessError, OSError): logging.error( 'Failed to compile grammar with ACE. See {}' .format(log.name if log is not None else '<stderr>') ) raise
python
def compile(cfg_path, out_path, executable=None, env=None, log=None): try: check_call( [(executable or 'ace'), '-g', cfg_path, '-G', out_path], stdout=log, stderr=log, close_fds=True, env=(env or os.environ) ) except (CalledProcessError, OSError): logging.error( 'Failed to compile grammar with ACE. See {}' .format(log.name if log is not None else '<stderr>') ) raise
[ "def", "compile", "(", "cfg_path", ",", "out_path", ",", "executable", "=", "None", ",", "env", "=", "None", ",", "log", "=", "None", ")", ":", "try", ":", "check_call", "(", "[", "(", "executable", "or", "'ace'", ")", ",", "'-g'", ",", "cfg_path", ...
Use ACE to compile a grammar. Args: cfg_path (str): the path to the ACE config file out_path (str): the path where the compiled grammar will be written executable (str, optional): the path to the ACE binary; if `None`, the `ace` command will be used env (dict, optional): environment variables to pass to the ACE subprocess log (file, optional): if given, the file, opened for writing, or stream to write ACE's stdout and stderr compile messages
[ "Use", "ACE", "to", "compile", "a", "grammar", "." ]
7bd2cd63ab7cf74803e1d6547b9ebc014b382abd
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/interfaces/ace.py#L439-L465
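A usage sketch for the compile helper above; the grammar config path and output image name are placeholders, and the log handle is optional.

    from delphin.interfaces import ace

    with open('ace-compile.log', 'w') as logfile:
        # Placeholder paths for a real grammar's ACE config and compiled image.
        ace.compile('path/to/ace/config.tdl', 'grammar.dat', log=logfile)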
8,785
delph-in/pydelphin
delphin/interfaces/ace.py
AceProcess.close
def close(self): """ Close the ACE process and return the process's exit code. """ self.run_info['end'] = datetime.now() self._p.stdin.close() for line in self._p.stdout: if line.startswith('NOTE: tsdb run:'): self._read_run_info(line) else: logging.debug('ACE cleanup: {}'.format(line.rstrip())) retval = self._p.wait() return retval
python
def close(self): self.run_info['end'] = datetime.now() self._p.stdin.close() for line in self._p.stdout: if line.startswith('NOTE: tsdb run:'): self._read_run_info(line) else: logging.debug('ACE cleanup: {}'.format(line.rstrip())) retval = self._p.wait() return retval
[ "def", "close", "(", "self", ")", ":", "self", ".", "run_info", "[", "'end'", "]", "=", "datetime", ".", "now", "(", ")", "self", ".", "_p", ".", "stdin", ".", "close", "(", ")", "for", "line", "in", "self", ".", "_p", ".", "stdout", ":", "if",...
Close the ACE process and return the process's exit code.
[ "Close", "the", "ACE", "process", "and", "return", "the", "process", "s", "exit", "code", "." ]
7bd2cd63ab7cf74803e1d6547b9ebc014b382abd
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/interfaces/ace.py#L316-L328
8,786
delph-in/pydelphin
delphin/mrs/dmrx.py
loads
def loads(s, single=False): """ Deserialize DMRX string representations Args: s (str): a DMRX string single (bool): if `True`, only return the first Xmrs object Returns: a generator of Xmrs objects (unless *single* is `True`) """ corpus = etree.fromstring(s) if single: ds = _deserialize_dmrs(next(iter(corpus))) else: ds = (_deserialize_dmrs(dmrs_elem) for dmrs_elem in corpus) return ds
python
def loads(s, single=False): corpus = etree.fromstring(s) if single: ds = _deserialize_dmrs(next(iter(corpus))) else: ds = (_deserialize_dmrs(dmrs_elem) for dmrs_elem in corpus) return ds
[ "def", "loads", "(", "s", ",", "single", "=", "False", ")", ":", "corpus", "=", "etree", ".", "fromstring", "(", "s", ")", "if", "single", ":", "ds", "=", "_deserialize_dmrs", "(", "next", "(", "iter", "(", "corpus", ")", ")", ")", "else", ":", "...
Deserialize DMRX string representations Args: s (str): a DMRX string single (bool): if `True`, only return the first Xmrs object Returns: a generator of Xmrs objects (unless *single* is `True`)
[ "Deserialize", "DMRX", "string", "representations" ]
7bd2cd63ab7cf74803e1d6547b9ebc014b382abd
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/mrs/dmrx.py#L41-L56
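A usage sketch for loads above: read a DMRX document and deserialize either just the first item or the whole corpus. The filename is a placeholder.

    from delphin.mrs import dmrx

    with open('sentences.dmrs.xml') as f:   # placeholder filename
        s = f.read()

    first = dmrx.loads(s, single=True)      # only the first Xmrs object
    for xmrs in dmrx.loads(s):              # or a generator over the whole corpus
        print(xmrs)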
8,787
delph-in/pydelphin
delphin/interfaces/base.py
ParseResult.derivation
def derivation(self): """ Deserialize and return a Derivation object for UDF- or JSON-formatted derivation data; otherwise return the original string. """ drv = self.get('derivation') if drv is not None: if isinstance(drv, dict): drv = Derivation.from_dict(drv) elif isinstance(drv, stringtypes): drv = Derivation.from_string(drv) return drv
python
def derivation(self): drv = self.get('derivation') if drv is not None: if isinstance(drv, dict): drv = Derivation.from_dict(drv) elif isinstance(drv, stringtypes): drv = Derivation.from_string(drv) return drv
[ "def", "derivation", "(", "self", ")", ":", "drv", "=", "self", ".", "get", "(", "'derivation'", ")", "if", "drv", "is", "not", "None", ":", "if", "isinstance", "(", "drv", ",", "dict", ")", ":", "drv", "=", "Derivation", ".", "from_dict", "(", "dr...
Deserialize and return a Derivation object for UDF- or JSON-formatted derivation data; otherwise return the original string.
[ "Deserialize", "and", "return", "a", "Derivation", "object", "for", "UDF", "-", "or", "JSON", "-", "formatted", "derivation", "data", ";", "otherwise", "return", "the", "original", "string", "." ]
7bd2cd63ab7cf74803e1d6547b9ebc014b382abd
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/interfaces/base.py#L64-L76
8,788
delph-in/pydelphin
delphin/interfaces/base.py
ParseResult.tree
def tree(self): """ Deserialize and return a labeled syntax tree. The tree data may be a standalone datum, or embedded in the derivation. """ tree = self.get('tree') if isinstance(tree, stringtypes): tree = SExpr.parse(tree).data elif tree is None: drv = self.get('derivation') if isinstance(drv, dict) and 'label' in drv: def _extract_tree(d): t = [d.get('label', '')] if 'tokens' in d: t.append([d.get('form', '')]) else: for dtr in d.get('daughters', []): t.append(_extract_tree(dtr)) return t tree = _extract_tree(drv) return tree
python
def tree(self): tree = self.get('tree') if isinstance(tree, stringtypes): tree = SExpr.parse(tree).data elif tree is None: drv = self.get('derivation') if isinstance(drv, dict) and 'label' in drv: def _extract_tree(d): t = [d.get('label', '')] if 'tokens' in d: t.append([d.get('form', '')]) else: for dtr in d.get('daughters', []): t.append(_extract_tree(dtr)) return t tree = _extract_tree(drv) return tree
[ "def", "tree", "(", "self", ")", ":", "tree", "=", "self", ".", "get", "(", "'tree'", ")", "if", "isinstance", "(", "tree", ",", "stringtypes", ")", ":", "tree", "=", "SExpr", ".", "parse", "(", "tree", ")", ".", "data", "elif", "tree", "is", "No...
Deserialize and return a labeled syntax tree. The tree data may be a standalone datum, or embedded in the derivation.
[ "Deserialize", "and", "return", "a", "labeled", "syntax", "tree", ".", "The", "tree", "data", "may", "be", "a", "standalone", "datum", "or", "embedded", "in", "the", "derivation", "." ]
7bd2cd63ab7cf74803e1d6547b9ebc014b382abd
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/interfaces/base.py#L78-L98
8,789
delph-in/pydelphin
delphin/interfaces/base.py
ParseResult.mrs
def mrs(self): """ Deserialize and return an Mrs object for simplemrs or JSON-formatted MRS data; otherwise return the original string. """ mrs = self.get('mrs') if mrs is not None: if isinstance(mrs, dict): mrs = Mrs.from_dict(mrs) elif isinstance(mrs, stringtypes): mrs = simplemrs.loads_one(mrs) return mrs
python
def mrs(self): mrs = self.get('mrs') if mrs is not None: if isinstance(mrs, dict): mrs = Mrs.from_dict(mrs) elif isinstance(mrs, stringtypes): mrs = simplemrs.loads_one(mrs) return mrs
[ "def", "mrs", "(", "self", ")", ":", "mrs", "=", "self", ".", "get", "(", "'mrs'", ")", "if", "mrs", "is", "not", "None", ":", "if", "isinstance", "(", "mrs", ",", "dict", ")", ":", "mrs", "=", "Mrs", ".", "from_dict", "(", "mrs", ")", "elif", ...
Deserialize and return an Mrs object for simplemrs or JSON-formatted MRS data; otherwise return the original string.
[ "Deserialize", "and", "return", "an", "Mrs", "object", "for", "simplemrs", "or", "JSON", "-", "formatted", "MRS", "data", ";", "otherwise", "return", "the", "original", "string", "." ]
7bd2cd63ab7cf74803e1d6547b9ebc014b382abd
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/interfaces/base.py#L100-L111
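These accessors are normally reached through a parse response. The sketch below assumes the ace.parse convenience function and the response.result(i) accessor that pydelphin's interface objects expose; the grammar image and sentence are placeholders.

    from delphin.interfaces import ace

    response = ace.parse('erg.dat', 'Dogs bark.')   # placeholder grammar image
    result = response.result(0)                     # first ParseResult (assumed accessor)
    print(result.derivation())                      # Derivation object, or the raw string
    print(result.mrs())                             # Mrs object deserialized from simplemrs/JSON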
8,790
delph-in/pydelphin
delphin/interfaces/base.py
ParseResult.eds
def eds(self): """ Deserialize and return an Eds object for native- or JSON-formatted EDS data; otherwise return the original string. """ _eds = self.get('eds') if _eds is not None: if isinstance(_eds, dict): _eds = eds.Eds.from_dict(_eds) elif isinstance(_eds, stringtypes): _eds = eds.loads_one(_eds) return _eds
python
def eds(self): _eds = self.get('eds') if _eds is not None: if isinstance(_eds, dict): _eds = eds.Eds.from_dict(_eds) elif isinstance(_eds, stringtypes): _eds = eds.loads_one(_eds) return _eds
[ "def", "eds", "(", "self", ")", ":", "_eds", "=", "self", ".", "get", "(", "'eds'", ")", "if", "_eds", "is", "not", "None", ":", "if", "isinstance", "(", "_eds", ",", "dict", ")", ":", "_eds", "=", "eds", ".", "Eds", ".", "from_dict", "(", "_ed...
Deserialize and return an Eds object for native- or JSON-formatted EDS data; otherwise return the original string.
[ "Deserialize", "and", "return", "an", "Eds", "object", "for", "native", "-", "or", "JSON", "-", "formatted", "EDS", "data", ";", "otherwise", "return", "the", "original", "string", "." ]
7bd2cd63ab7cf74803e1d6547b9ebc014b382abd
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/interfaces/base.py#L113-L124
8,791
delph-in/pydelphin
delphin/interfaces/base.py
ParseResult.dmrs
def dmrs(self): """ Deserialize and return a Dmrs object for JSON-formatted DMRS data; otherwise return the original string. """ dmrs = self.get('dmrs') if dmrs is not None: if isinstance(dmrs, dict): dmrs = Dmrs.from_dict(dmrs) return dmrs
python
def dmrs(self): dmrs = self.get('dmrs') if dmrs is not None: if isinstance(dmrs, dict): dmrs = Dmrs.from_dict(dmrs) return dmrs
[ "def", "dmrs", "(", "self", ")", ":", "dmrs", "=", "self", ".", "get", "(", "'dmrs'", ")", "if", "dmrs", "is", "not", "None", ":", "if", "isinstance", "(", "dmrs", ",", "dict", ")", ":", "dmrs", "=", "Dmrs", ".", "from_dict", "(", "dmrs", ")", ...
Deserialize and return a Dmrs object for JSON-formatted DMRS data; otherwise return the original string.
[ "Deserialize", "and", "return", "a", "Dmrs", "object", "for", "JSON", "-", "formatted", "DMRS", "data", ";", "otherwise", "return", "the", "original", "string", "." ]
7bd2cd63ab7cf74803e1d6547b9ebc014b382abd
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/interfaces/base.py#L126-L135
8,792
delph-in/pydelphin
delphin/interfaces/base.py
ParseResponse.tokens
def tokens(self, tokenset='internal'): """ Deserialize and return a YyTokenLattice object for the initial or internal token set, if provided, from the YY format or the JSON-formatted data; otherwise return the original string. Args: tokenset (str): return `'initial'` or `'internal'` tokens (default: `'internal'`) Returns: :class:`YyTokenLattice` """ toks = self.get('tokens', {}).get(tokenset) if toks is not None: if isinstance(toks, stringtypes): toks = YyTokenLattice.from_string(toks) elif isinstance(toks, Sequence): toks = YyTokenLattice.from_list(toks) return toks
python
def tokens(self, tokenset='internal'): toks = self.get('tokens', {}).get(tokenset) if toks is not None: if isinstance(toks, stringtypes): toks = YyTokenLattice.from_string(toks) elif isinstance(toks, Sequence): toks = YyTokenLattice.from_list(toks) return toks
[ "def", "tokens", "(", "self", ",", "tokenset", "=", "'internal'", ")", ":", "toks", "=", "self", ".", "get", "(", "'tokens'", ",", "{", "}", ")", ".", "get", "(", "tokenset", ")", "if", "toks", "is", "not", "None", ":", "if", "isinstance", "(", "...
Deserialize and return a YyTokenLattice object for the initial or internal token set, if provided, from the YY format or the JSON-formatted data; otherwise return the original string. Args: tokenset (str): return `'initial'` or `'internal'` tokens (default: `'internal'`) Returns: :class:`YyTokenLattice`
[ "Deserialize", "and", "return", "a", "YyTokenLattice", "object", "for", "the", "initial", "or", "internal", "token", "set", "if", "provided", "from", "the", "YY", "format", "or", "the", "JSON", "-", "formatted", "data", ";", "otherwise", "return", "the", "or...
7bd2cd63ab7cf74803e1d6547b9ebc014b382abd
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/interfaces/base.py#L155-L174
8,793
luckydonald/pytgbot
pytgbot/api_types/receivable/game.py
GameHighScore.to_array
def to_array(self): """ Serializes this GameHighScore to a dictionary. :return: dictionary representation of this object. :rtype: dict """ array = super(GameHighScore, self).to_array() array['position'] = int(self.position) # type int array['user'] = self.user.to_array() # type User array['score'] = int(self.score) # type int return array
python
def to_array(self): array = super(GameHighScore, self).to_array() array['position'] = int(self.position) # type int array['user'] = self.user.to_array() # type User array['score'] = int(self.score) # type int return array
[ "def", "to_array", "(", "self", ")", ":", "array", "=", "super", "(", "GameHighScore", ",", "self", ")", ".", "to_array", "(", ")", "array", "[", "'position'", "]", "=", "int", "(", "self", ".", "position", ")", "# type int", "array", "[", "'user'", ...
Serializes this GameHighScore to a dictionary. :return: dictionary representation of this object. :rtype: dict
[ "Serializes", "this", "GameHighScore", "to", "a", "dictionary", "." ]
67f4b5a1510d4583d40b5477e876b1ef0eb8971b
https://github.com/luckydonald/pytgbot/blob/67f4b5a1510d4583d40b5477e876b1ef0eb8971b/pytgbot/api_types/receivable/game.py#L74-L85
8,794
luckydonald/pytgbot
pytgbot/api_types/receivable/game.py
GameHighScore.from_array
def from_array(array): """ Deserialize a new GameHighScore from a given dictionary. :return: new GameHighScore instance. :rtype: GameHighScore """ if array is None or not array: return None # end if assert_type_or_raise(array, dict, parameter_name="array") from pytgbot.api_types.receivable.peer import User data = {} data['position'] = int(array.get('position')) data['user'] = User.from_array(array.get('user')) data['score'] = int(array.get('score')) data['_raw'] = array return GameHighScore(**data)
python
def from_array(array): if array is None or not array: return None # end if assert_type_or_raise(array, dict, parameter_name="array") from pytgbot.api_types.receivable.peer import User data = {} data['position'] = int(array.get('position')) data['user'] = User.from_array(array.get('user')) data['score'] = int(array.get('score')) data['_raw'] = array return GameHighScore(**data)
[ "def", "from_array", "(", "array", ")", ":", "if", "array", "is", "None", "or", "not", "array", ":", "return", "None", "# end if", "assert_type_or_raise", "(", "array", ",", "dict", ",", "parameter_name", "=", "\"array\"", ")", "from", "pytgbot", ".", "api...
Deserialize a new GameHighScore from a given dictionary. :return: new GameHighScore instance. :rtype: GameHighScore
[ "Deserialize", "a", "new", "GameHighScore", "from", "a", "given", "dictionary", "." ]
67f4b5a1510d4583d40b5477e876b1ef0eb8971b
https://github.com/luckydonald/pytgbot/blob/67f4b5a1510d4583d40b5477e876b1ef0eb8971b/pytgbot/api_types/receivable/game.py#L89-L108
8,795
delph-in/pydelphin
delphin/lib/pegre.py
valuemap
def valuemap(f): """ Decorator to help PEG functions handle value conversions. """ @wraps(f) def wrapper(*args, **kwargs): if 'value' in kwargs: val = kwargs['value'] del kwargs['value'] _f = f(*args, **kwargs) def valued_f(*args, **kwargs): result = _f(*args, **kwargs) s, obj, span = result if callable(val): return PegreResult(s, val(obj), span) else: return PegreResult(s, val, span) return valued_f else: return f(*args, **kwargs) return wrapper
python
def valuemap(f): @wraps(f) def wrapper(*args, **kwargs): if 'value' in kwargs: val = kwargs['value'] del kwargs['value'] _f = f(*args, **kwargs) def valued_f(*args, **kwargs): result = _f(*args, **kwargs) s, obj, span = result if callable(val): return PegreResult(s, val(obj), span) else: return PegreResult(s, val, span) return valued_f else: return f(*args, **kwargs) return wrapper
[ "def", "valuemap", "(", "f", ")", ":", "@", "wraps", "(", "f", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "'value'", "in", "kwargs", ":", "val", "=", "kwargs", "[", "'value'", "]", "del", "kwargs", "[", "'...
Decorator to help PEG functions handle value conversions.
[ "Decorator", "to", "help", "PEG", "functions", "handle", "value", "conversions", "." ]
7bd2cd63ab7cf74803e1d6547b9ebc014b382abd
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/lib/pegre.py#L74-L94
8,796
delph-in/pydelphin
delphin/lib/pegre.py
literal
def literal(x): """ Create a PEG function to consume a literal. """ xlen = len(x) msg = 'Expected: "{}"'.format(x) def match_literal(s, grm=None, pos=0): if s[:xlen] == x: return PegreResult(s[xlen:], x, (pos, pos+xlen)) raise PegreError(msg, pos) return match_literal
python
def literal(x): xlen = len(x) msg = 'Expected: "{}"'.format(x) def match_literal(s, grm=None, pos=0): if s[:xlen] == x: return PegreResult(s[xlen:], x, (pos, pos+xlen)) raise PegreError(msg, pos) return match_literal
[ "def", "literal", "(", "x", ")", ":", "xlen", "=", "len", "(", "x", ")", "msg", "=", "'Expected: \"{}\"'", ".", "format", "(", "x", ")", "def", "match_literal", "(", "s", ",", "grm", "=", "None", ",", "pos", "=", "0", ")", ":", "if", "s", "[", ...
Create a PEG function to consume a literal.
[ "Create", "a", "PEG", "function", "to", "consume", "a", "literal", "." ]
7bd2cd63ab7cf74803e1d6547b9ebc014b382abd
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/lib/pegre.py#L97-L107
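A quick check of the constructed matcher: on success it returns a PegreResult carrying the remaining string, the matched text, and the span; on failure it raises PegreError. The import of PegreError from the same module is assumed from its use in the code above.

    from delphin.lib.pegre import literal, PegreError

    match_hello = literal('hello')
    print(match_hello('hello world'))   # remainder ' world', data 'hello', span (0, 5)
    try:
        match_hello('goodbye')
    except PegreError as exc:
        print('no match:', exc)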
8,797
delph-in/pydelphin
delphin/lib/pegre.py
regex
def regex(r): """ Create a PEG function to match a regular expression. """ if isinstance(r, stringtypes): p = re.compile(r) else: p = r msg = 'Expected to match: {}'.format(p.pattern) def match_regex(s, grm=None, pos=0): m = p.match(s) if m is not None: start, end = m.span() data = m.groupdict() if p.groupindex else m.group() return PegreResult(s[m.end():], data, (pos+start, pos+end)) raise PegreError(msg, pos) return match_regex
python
def regex(r): if isinstance(r, stringtypes): p = re.compile(r) else: p = r msg = 'Expected to match: {}'.format(p.pattern) def match_regex(s, grm=None, pos=0): m = p.match(s) if m is not None: start, end = m.span() data = m.groupdict() if p.groupindex else m.group() return PegreResult(s[m.end():], data, (pos+start, pos+end)) raise PegreError(msg, pos) return match_regex
[ "def", "regex", "(", "r", ")", ":", "if", "isinstance", "(", "r", ",", "stringtypes", ")", ":", "p", "=", "re", ".", "compile", "(", "r", ")", "else", ":", "p", "=", "r", "msg", "=", "'Expected to match: {}'", ".", "format", "(", "p", ".", "patte...
Create a PEG function to match a regular expression.
[ "Create", "a", "PEG", "function", "to", "match", "a", "regular", "expression", "." ]
7bd2cd63ab7cf74803e1d6547b9ebc014b382abd
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/lib/pegre.py#L110-L126
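The same pattern for regex: a plain pattern yields the whole match as the data, while named groups come back as a dict because of the groupdict branch above. The patterns below are arbitrary examples.

    from delphin.lib.pegre import regex

    number = regex(r'-?\d+')
    print(number('42 apples'))        # remainder ' apples', data '42', span (0, 2)

    pair = regex(r'(?P<key>\w+)=(?P<val>\w+)')
    print(pair('mode=fast rest'))     # data is {'key': 'mode', 'val': 'fast'}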
8,798
delph-in/pydelphin
delphin/lib/pegre.py
nonterminal
def nonterminal(n): """ Create a PEG function to match a nonterminal. """ def match_nonterminal(s, grm=None, pos=0): if grm is None: grm = {} expr = grm[n] return expr(s, grm, pos) return match_nonterminal
python
def nonterminal(n): def match_nonterminal(s, grm=None, pos=0): if grm is None: grm = {} expr = grm[n] return expr(s, grm, pos) return match_nonterminal
[ "def", "nonterminal", "(", "n", ")", ":", "def", "match_nonterminal", "(", "s", ",", "grm", "=", "None", ",", "pos", "=", "0", ")", ":", "if", "grm", "is", "None", ":", "grm", "=", "{", "}", "expr", "=", "grm", "[", "n", "]", "return", "expr", ...
Create a PEG function to match a nonterminal.
[ "Create", "a", "PEG", "function", "to", "match", "a", "nonterminal", "." ]
7bd2cd63ab7cf74803e1d6547b9ebc014b382abd
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/lib/pegre.py#L129-L137
8,799
delph-in/pydelphin
delphin/lib/pegre.py
and_next
def and_next(e): """ Create a PEG function for positive lookahead. """ def match_and_next(s, grm=None, pos=0): try: e(s, grm, pos) except PegreError as ex: raise PegreError('Positive lookahead failed', pos) else: return PegreResult(s, Ignore, (pos, pos)) return match_and_next
python
def and_next(e): def match_and_next(s, grm=None, pos=0): try: e(s, grm, pos) except PegreError as ex: raise PegreError('Positive lookahead failed', pos) else: return PegreResult(s, Ignore, (pos, pos)) return match_and_next
[ "def", "and_next", "(", "e", ")", ":", "def", "match_and_next", "(", "s", ",", "grm", "=", "None", ",", "pos", "=", "0", ")", ":", "try", ":", "e", "(", "s", ",", "grm", ",", "pos", ")", "except", "PegreError", "as", "ex", ":", "raise", "PegreE...
Create a PEG function for positive lookahead.
[ "Create", "a", "PEG", "function", "for", "positive", "lookahead", "." ]
7bd2cd63ab7cf74803e1d6547b9ebc014b382abd
https://github.com/delph-in/pydelphin/blob/7bd2cd63ab7cf74803e1d6547b9ebc014b382abd/delphin/lib/pegre.py#L140-L151
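Positive lookahead consumes no input: on success it returns the string unchanged with the Ignore sentinel and a zero-width span, and on failure it raises. A small sketch, assuming literal and PegreError are importable from the same module as above.

    from delphin.lib.pegre import literal, and_next, PegreError

    at_keyword = and_next(literal('def'))
    print(at_keyword('def foo(): pass'))   # remainder unchanged, zero-width span (0, 0)
    try:
        at_keyword('class Foo: pass')
    except PegreError as exc:
        print('lookahead failed:', exc)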